X-Git-Url: http://git.sourceforge.jp/view?a=blobdiff_plain;f=gcc%2Fdf-problems.c;h=25bbba61bb1d05b7f82bb99d0c014c98fa76e5bc;hb=653d0639db9f426c9cc72bdb620d2d06055af72e;hp=84dc42a13612928db7f1cc0349ee6dd62e0afd0b;hpb=8dc2823a2f3ba69e89bcb0189f9e2f95021f0cb6;p=pf3gnuchains%2Fgcc-fork.git diff --git a/gcc/df-problems.c b/gcc/df-problems.c index 84dc42a1361..25bbba61bb1 100644 --- a/gcc/df-problems.c +++ b/gcc/df-problems.c @@ -1,7 +1,7 @@ /* Standard problems for dataflow support routines. Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, - 2008, 2009 Free Software Foundation, Inc. - Originally contributed by Michael P. Hayes + 2008, 2009, 2010 Free Software Foundation, Inc. + Originally contributed by Michael P. Hayes (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com) Major rewrite contributed by Danny Berlin (dberlin@dberlin.org) and Kenneth Zadeck (zadeck@naturalbridge.com). @@ -47,7 +47,7 @@ along with GCC; see the file COPYING3. If not see /* Note that turning REG_DEAD_DEBUGGING on will cause gcc.c-torture/unsorted/dump-noaddr.c to fail because it prints - addresses in the dumps. */ + addresses in the dumps. */ #if 0 #define REG_DEAD_DEBUGGING #endif @@ -73,7 +73,7 @@ df_get_live_out (basic_block bb) if (df_live) return DF_LIVE_OUT (bb); - else + else return DF_LR_OUT (bb); } @@ -89,7 +89,7 @@ df_get_live_in (basic_block bb) if (df_live) return DF_LIVE_IN (bb); - else + else return DF_LR_IN (bb); } @@ -136,7 +136,7 @@ df_chain_dump (struct df_link *link, FILE *file) /* Print some basic block info as part of df_dump. */ -void +void df_print_bb_index (basic_block bb, FILE *file) { edge e; @@ -147,37 +147,16 @@ df_print_bb_index (basic_block bb, FILE *file) { basic_block pred = e->src; fprintf (file, "%d%s ", pred->index, e->flags & EDGE_EH ? "(EH)" : ""); - } + } fprintf (file, ")->[%d]->( ", bb->index); FOR_EACH_EDGE (e, ei, bb->succs) { basic_block succ = e->dest; fprintf (file, "%d%s ", succ->index, e->flags & EDGE_EH ? "(EH)" : ""); - } + } fprintf (file, ")\n"); } - - -/* Make sure that the seen_in_insn and seen_in_block sbitmaps are set - up correctly. */ - -static void -df_set_seen (void) -{ - seen_in_block = BITMAP_ALLOC (&df_bitmap_obstack); - seen_in_insn = BITMAP_ALLOC (&df_bitmap_obstack); -} - - -static void -df_unset_seen (void) -{ - BITMAP_FREE (seen_in_block); - BITMAP_FREE (seen_in_insn); -} - - /*---------------------------------------------------------------------------- REACHING DEFINITIONS @@ -189,12 +168,12 @@ df_unset_seen (void) ----------------------------------------------------------------------------*/ /* This problem plays a large number of games for the sake of - efficiency. - + efficiency. + 1) The order of the bits in the bitvectors. After the scanning phase, all of the defs are sorted. All of the defs for the reg 0 are first, followed by all defs for reg 1 and so on. - + 2) There are two kill sets, one if the number of defs is less or equal to DF_SPARSE_THRESHOLD and another if the number of defs is greater. @@ -216,8 +195,8 @@ df_unset_seen (void) struct df_rd_problem_data { /* The set of defs to regs invalidated by call. */ - bitmap sparse_invalidated_by_call; - /* The set of defs to regs invalidate by call for rd. */ + bitmap sparse_invalidated_by_call; + /* The set of defs to regs invalidate by call for rd. */ bitmap dense_invalidated_by_call; /* An obstack for the bitmaps we need for this problem. */ bitmap_obstack rd_bitmaps; @@ -226,7 +205,7 @@ struct df_rd_problem_data /* Set basic block info. 
*/ static void -df_rd_set_bb_info (unsigned int index, +df_rd_set_bb_info (unsigned int index, struct df_rd_bb_info *bb_info) { gcc_assert (df_rd); @@ -238,7 +217,7 @@ df_rd_set_bb_info (unsigned int index, /* Free basic block info. */ static void -df_rd_free_bb_info (basic_block bb ATTRIBUTE_UNUSED, +df_rd_free_bb_info (basic_block bb ATTRIBUTE_UNUSED, void *vbb_info) { struct df_rd_bb_info *bb_info = (struct df_rd_bb_info *) vbb_info; @@ -257,7 +236,7 @@ df_rd_free_bb_info (basic_block bb ATTRIBUTE_UNUSED, /* Allocate or reset bitmaps for DF_RD blocks. The solution bits are not touched unless the block is new. */ -static void +static void df_rd_alloc (bitmap all_blocks) { unsigned int bb_index; @@ -265,7 +244,7 @@ df_rd_alloc (bitmap all_blocks) struct df_rd_problem_data *problem_data; if (!df_rd->block_pool) - df_rd->block_pool = create_alloc_pool ("df_rd_block pool", + df_rd->block_pool = create_alloc_pool ("df_rd_block pool", sizeof (struct df_rd_bb_info), 50); if (df_rd->problem_data) @@ -274,7 +253,7 @@ df_rd_alloc (bitmap all_blocks) bitmap_clear (problem_data->sparse_invalidated_by_call); bitmap_clear (problem_data->dense_invalidated_by_call); } - else + else { problem_data = XNEW (struct df_rd_problem_data); df_rd->problem_data = problem_data; @@ -296,13 +275,13 @@ df_rd_alloc (bitmap all_blocks) { struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index); if (bb_info) - { + { bitmap_clear (bb_info->kill); bitmap_clear (bb_info->sparse_kill); bitmap_clear (bb_info->gen); } else - { + { bb_info = (struct df_rd_bb_info *) pool_alloc (df_rd->block_pool); df_rd_set_bb_info (bb_index, bb_info); bb_info->kill = BITMAP_ALLOC (&problem_data->rd_bitmaps); @@ -331,8 +310,8 @@ df_rd_simulate_artificial_defs_at_top (basic_block bb, bitmap local_rd) { unsigned int dregno = DF_REF_REGNO (def); if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))) - bitmap_clear_range (local_rd, - DF_DEFS_BEGIN (dregno), + bitmap_clear_range (local_rd, + DF_DEFS_BEGIN (dregno), DF_DEFS_COUNT (dregno)); bitmap_set_bit (local_rd, DF_REF_ID (def)); } @@ -357,10 +336,10 @@ df_rd_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx insn, || (dregno >= FIRST_PSEUDO_REGISTER)) { if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))) - bitmap_clear_range (local_rd, - DF_DEFS_BEGIN (dregno), + bitmap_clear_range (local_rd, + DF_DEFS_BEGIN (dregno), DF_DEFS_COUNT (dregno)); - if (!(DF_REF_FLAGS (def) + if (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))) bitmap_set_bit (local_rd, DF_REF_ID (def)); } @@ -373,7 +352,7 @@ df_rd_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx insn, of kill sets. */ static void -df_rd_bb_local_compute_process_def (struct df_rd_bb_info *bb_info, +df_rd_bb_local_compute_process_def (struct df_rd_bb_info *bb_info, df_ref *def_rec, int top_flag) { @@ -385,12 +364,12 @@ df_rd_bb_local_compute_process_def (struct df_rd_bb_info *bb_info, unsigned int regno = DF_REF_REGNO (def); unsigned int begin = DF_DEFS_BEGIN (regno); unsigned int n_defs = DF_DEFS_COUNT (regno); - + if ((!(df->changeable_flags & DF_NO_HARD_REGS)) || (regno >= FIRST_PSEUDO_REGISTER)) { /* Only the last def(s) for a regno in the block has any - effect. */ + effect. 
*/ if (!bitmap_bit_p (seen_in_block, regno)) { /* The first def for regno in insn gets to knock out the @@ -398,7 +377,7 @@ df_rd_bb_local_compute_process_def (struct df_rd_bb_info *bb_info, if ((!bitmap_bit_p (seen_in_insn, regno)) /* If the def is to only part of the reg, it does not kill the other defs that reach here. */ - && (!(DF_REF_FLAGS (def) & + && (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER)))) { if (n_defs > DF_SPARSE_THRESHOLD) @@ -412,11 +391,11 @@ df_rd_bb_local_compute_process_def (struct df_rd_bb_info *bb_info, bitmap_clear_range (bb_info->gen, begin, n_defs); } } - + bitmap_set_bit (seen_in_insn, regno); /* All defs for regno in the instruction may be put into the gen set. */ - if (!(DF_REF_FLAGS (def) + if (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))) bitmap_set_bit (bb_info->gen, DF_REF_ID (def)); } @@ -440,7 +419,7 @@ df_rd_bb_local_compute (unsigned int bb_index) /* Artificials are only hard regs. */ if (!(df->changeable_flags & DF_NO_HARD_REGS)) - df_rd_bb_local_compute_process_def (bb_info, + df_rd_bb_local_compute_process_def (bb_info, df_get_artificial_defs (bb_index), 0); @@ -451,7 +430,7 @@ df_rd_bb_local_compute (unsigned int bb_index) if (!INSN_P (insn)) continue; - df_rd_bb_local_compute_process_def (bb_info, + df_rd_bb_local_compute_process_def (bb_info, DF_INSN_UID_DEFS (uid), 0); /* This complex dance with the two bitmaps is required because @@ -468,7 +447,7 @@ df_rd_bb_local_compute (unsigned int bb_index) are going backwards through the block and these are logically at the start. */ if (!(df->changeable_flags & DF_NO_HARD_REGS)) - df_rd_bb_local_compute_process_def (bb_info, + df_rd_bb_local_compute_process_def (bb_info, df_get_artificial_defs (bb_index), DF_REF_AT_TOP); } @@ -487,7 +466,8 @@ df_rd_local_compute (bitmap all_blocks) bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call; bitmap dense_invalidated = problem_data->dense_invalidated_by_call; - df_set_seen (); + seen_in_block = BITMAP_ALLOC (&df_bitmap_obstack); + seen_in_insn = BITMAP_ALLOC (&df_bitmap_obstack); df_maybe_reorganize_def_refs (DF_REF_ORDER_BY_REG); @@ -495,24 +475,26 @@ df_rd_local_compute (bitmap all_blocks) { df_rd_bb_local_compute (bb_index); } - + /* Set up the knockout bit vectors to be applied across EH_EDGES. */ EXECUTE_IF_SET_IN_BITMAP (regs_invalidated_by_call_regset, 0, regno, bi) { if (DF_DEFS_COUNT (regno) > DF_SPARSE_THRESHOLD) bitmap_set_bit (sparse_invalidated, regno); else - bitmap_set_range (dense_invalidated, - DF_DEFS_BEGIN (regno), + bitmap_set_range (dense_invalidated, + DF_DEFS_BEGIN (regno), DF_DEFS_COUNT (regno)); } - df_unset_seen (); + + BITMAP_FREE (seen_in_block); + BITMAP_FREE (seen_in_insn); } /* Initialize the solution bit vectors for problem. 
*/ -static void +static void df_rd_init_solution (bitmap all_blocks) { unsigned int bb_index; @@ -521,7 +503,7 @@ df_rd_init_solution (bitmap all_blocks) EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi) { struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index); - + bitmap_copy (bb_info->out, bb_info->gen); bitmap_clear (bb_info->in); } @@ -535,7 +517,7 @@ df_rd_confluence_n (edge e) bitmap op1 = df_rd_get_bb_info (e->dest->index)->in; bitmap op2 = df_rd_get_bb_info (e->src->index)->out; - if (e->flags & EDGE_FAKE) + if (e->flags & EDGE_FAKE) return; if (e->flags & EDGE_EH) @@ -553,8 +535,8 @@ df_rd_confluence_n (edge e) EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi) { - bitmap_clear_range (tmp, - DF_DEFS_BEGIN (regno), + bitmap_clear_range (tmp, + DF_DEFS_BEGIN (regno), DF_DEFS_COUNT (regno)); } bitmap_ior_into (op1, tmp); @@ -581,7 +563,7 @@ df_rd_transfer_function (int bb_index) if (bitmap_empty_p (sparse_kill)) return bitmap_ior_and_compl (out, gen, in, kill); - else + else { struct df_rd_problem_data *problem_data; bool changed = false; @@ -595,8 +577,8 @@ df_rd_transfer_function (int bb_index) bitmap_copy (tmp, in); EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi) { - bitmap_clear_range (tmp, - DF_DEFS_BEGIN (regno), + bitmap_clear_range (tmp, + DF_DEFS_BEGIN (regno), DF_DEFS_COUNT (regno)); } bitmap_and_compl_into (tmp, kill); @@ -607,7 +589,7 @@ df_rd_transfer_function (int bb_index) BITMAP_FREE (out); bb_info->out = tmp; } - else + else BITMAP_FREE (tmp); return changed; } @@ -626,7 +608,7 @@ df_rd_free (void) { free_alloc_pool (df_rd->block_pool); bitmap_obstack_release (&problem_data->rd_bitmaps); - + df_rd->block_info_size = 0; free (df_rd->block_info); free (df_rd->problem_data); @@ -644,8 +626,8 @@ df_rd_start_dump (FILE *file) = (struct df_rd_problem_data *) df_rd->problem_data; unsigned int m = DF_REG_SIZE(df); unsigned int regno; - - if (!df_rd->block_info) + + if (!df_rd->block_info) return; fprintf (file, ";; Reaching defs:\n\n"); @@ -657,8 +639,8 @@ df_rd_start_dump (FILE *file) for (regno = 0; regno < m; regno++) if (DF_DEFS_COUNT (regno)) - fprintf (file, "%d[%d,%d] ", regno, - DF_DEFS_BEGIN (regno), + fprintf (file, "%d[%d,%d] ", regno, + DF_DEFS_BEGIN (regno), DF_DEFS_COUNT (regno)); fprintf (file, "\n"); @@ -673,7 +655,7 @@ df_rd_top_dump (basic_block bb, FILE *file) struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb->index); if (!bb_info || !bb_info->in) return; - + fprintf (file, ";; rd in \t(%d)\n", (int) bitmap_count_bits (bb_info->in)); dump_bitmap (file, bb_info->in); fprintf (file, ";; rd gen \t(%d)\n", (int) bitmap_count_bits (bb_info->gen)); @@ -691,7 +673,7 @@ df_rd_bottom_dump (basic_block bb, FILE *file) struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb->index); if (!bb_info || !bb_info->out) return; - + fprintf (file, ";; rd out \t(%d)\n", (int) bitmap_count_bits (bb_info->out)); dump_bitmap (file, bb_info->out); } @@ -708,8 +690,8 @@ static struct df_problem problem_RD = df_rd_local_compute, /* Local compute function. */ df_rd_init_solution, /* Init the solution specific data. */ df_worklist_dataflow, /* Worklist solver. */ - NULL, /* Confluence operator 0. */ - df_rd_confluence_n, /* Confluence operator n. */ + NULL, /* Confluence operator 0. */ + df_rd_confluence_n, /* Confluence operator n. */ df_rd_transfer_function, /* Transfer function. */ NULL, /* Finalize function. */ df_rd_free, /* Free all of the problem information. 
*/ @@ -720,15 +702,14 @@ static struct df_problem problem_RD = NULL, /* Incremental solution verify start. */ NULL, /* Incremental solution verify end. */ NULL, /* Dependent problem. */ - TV_DF_RD, /* Timing variable. */ + TV_DF_RD, /* Timing variable. */ true /* Reset blocks on dropping out of blocks_to_analyze. */ }; -/* Create a new DATAFLOW instance and add it to an existing instance - of DF. The returned structure is what is used to get at the - solution. */ +/* Create a new RD instance and add it to the existing instance + of DF. */ void df_rd_add_problem (void) @@ -758,7 +739,7 @@ struct df_lr_problem_data /* Set basic block info. */ static void -df_lr_set_bb_info (unsigned int index, +df_lr_set_bb_info (unsigned int index, struct df_lr_bb_info *bb_info) { gcc_assert (df_lr); @@ -766,11 +747,11 @@ df_lr_set_bb_info (unsigned int index, df_lr->block_info[index] = bb_info; } - + /* Free basic block info. */ static void -df_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED, +df_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED, void *vbb_info) { struct df_lr_bb_info *bb_info = (struct df_lr_bb_info *) vbb_info; @@ -788,14 +769,14 @@ df_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED, /* Allocate or reset bitmaps for DF_LR blocks. The solution bits are not touched unless the block is new. */ -static void +static void df_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED) { unsigned int bb_index; bitmap_iterator bi; if (!df_lr->block_pool) - df_lr->block_pool = create_alloc_pool ("df_lr_block pool", + df_lr->block_pool = create_alloc_pool ("df_lr_block pool", sizeof (struct df_lr_bb_info), 50); df_grow_bb_info (df_lr); @@ -804,12 +785,12 @@ df_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED) { struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index); if (bb_info) - { + { bitmap_clear (bb_info->def); bitmap_clear (bb_info->use); } else - { + { bb_info = (struct df_lr_bb_info *) pool_alloc (df_lr->block_pool); df_lr_set_bb_info (bb_index, bb_info); bb_info->use = BITMAP_ALLOC (NULL); @@ -825,7 +806,7 @@ df_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED) /* Reset the global solution for recalculation. */ -static void +static void df_lr_reset (bitmap all_blocks) { unsigned int bb_index; @@ -877,8 +858,8 @@ df_lr_bb_local_compute (unsigned int bb_index) { unsigned int uid = INSN_UID (insn); - if (!INSN_P (insn)) - continue; + if (!NONDEBUG_INSN_P (insn)) + continue; for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++) { @@ -914,7 +895,7 @@ df_lr_bb_local_compute (unsigned int bb_index) bitmap_clear_bit (bb_info->use, dregno); } } - + #ifdef EH_USES /* Process the uses that are live into an exception handler. */ for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++) @@ -942,12 +923,12 @@ df_lr_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED) { unsigned int bb_index; bitmap_iterator bi; - + bitmap_clear (df->hardware_regs_used); - + /* The all-important stack pointer must always be live. */ bitmap_set_bit (df->hardware_regs_used, STACK_POINTER_REGNUM); - + /* Before reload, there are a few registers that must be forced live everywhere -- which might not already be the case for blocks within infinite loops. */ @@ -956,21 +937,21 @@ df_lr_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED) /* Any reference to any pseudo before reload is a potential reference of the frame pointer. 
*/ bitmap_set_bit (df->hardware_regs_used, FRAME_POINTER_REGNUM); - + #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM /* Pseudos with argument area equivalences may require reloading via the argument pointer. */ if (fixed_regs[ARG_POINTER_REGNUM]) bitmap_set_bit (df->hardware_regs_used, ARG_POINTER_REGNUM); #endif - + /* Any constant, or pseudo with constant equivalences, may require reloading from memory using the pic register. */ if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM && fixed_regs[PIC_OFFSET_TABLE_REGNUM]) bitmap_set_bit (df->hardware_regs_used, PIC_OFFSET_TABLE_REGNUM); } - + EXECUTE_IF_SET_IN_BITMAP (df_lr->out_of_date_transfer_functions, 0, bb_index, bi) { if (bb_index == EXIT_BLOCK) @@ -990,7 +971,7 @@ df_lr_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED) /* Initialize the solution vectors. */ -static void +static void df_lr_init (bitmap all_blocks) { unsigned int bb_index; @@ -1014,7 +995,7 @@ df_lr_confluence_0 (basic_block bb) bitmap op1 = df_lr_get_bb_info (bb->index)->out; if (bb != EXIT_BLOCK_PTR) bitmap_copy (op1, df->hardware_regs_used); -} +} /* Confluence function that ignores fake edges. */ @@ -1024,7 +1005,7 @@ df_lr_confluence_n (edge e) { bitmap op1 = df_lr_get_bb_info (e->src->index)->out; bitmap op2 = df_lr_get_bb_info (e->dest->index)->in; - + /* Call-clobbered registers die across exception and call edges. */ /* ??? Abnormal call edges ignored for the moment, as this gets confused by sibling call edges, which crashes reg-stack. */ @@ -1034,7 +1015,7 @@ df_lr_confluence_n (edge e) bitmap_ior_into (op1, op2); bitmap_ior_into (op1, df->hardware_regs_used); -} +} /* Transfer function. */ @@ -1106,7 +1087,7 @@ df_lr_free (void) } } free_alloc_pool (df_lr->block_pool); - + df_lr->block_info_size = 0; free (df_lr->block_info); } @@ -1125,7 +1106,7 @@ df_lr_top_dump (basic_block bb, FILE *file) struct df_lr_problem_data *problem_data; if (!bb_info || !bb_info->in) return; - + fprintf (file, ";; lr in \t"); df_print_regset (file, bb_info->in); if (df_lr->problem_data) @@ -1138,7 +1119,7 @@ df_lr_top_dump (basic_block bb, FILE *file) df_print_regset (file, bb_info->use); fprintf (file, ";; lr def \t"); df_print_regset (file, bb_info->def); -} +} /* Debugging info at bottom of bb. */ @@ -1150,7 +1131,7 @@ df_lr_bottom_dump (basic_block bb, FILE *file) struct df_lr_problem_data *problem_data; if (!bb_info || !bb_info->out) return; - + fprintf (file, ";; lr out \t"); df_print_regset (file, bb_info->out); if (df_lr->problem_data) @@ -1159,7 +1140,7 @@ df_lr_bottom_dump (basic_block bb, FILE *file) fprintf (file, ";; old out \t"); df_print_regset (file, problem_data->out[bb->index]); } -} +} /* Build the datastructure to verify that the solution to the dataflow @@ -1176,7 +1157,7 @@ df_lr_verify_solution_start (void) return; } - /* Set it true so that the solution is recomputed. */ + /* Set it true so that the solution is recomputed. */ df_lr->solutions_dirty = true; problem_data = XNEW (struct df_lr_problem_data); @@ -1250,8 +1231,8 @@ static struct df_problem problem_LR = df_lr_local_compute, /* Local compute function. */ df_lr_init, /* Init the solution specific data. */ df_worklist_dataflow, /* Worklist solver. */ - df_lr_confluence_0, /* Confluence operator 0. */ - df_lr_confluence_n, /* Confluence operator n. */ + df_lr_confluence_0, /* Confluence operator 0. */ + df_lr_confluence_n, /* Confluence operator n. */ df_lr_transfer_function, /* Transfer function. */ df_lr_finalize, /* Finalize function. */ df_lr_free, /* Free all of the problem information. 
*/ @@ -1262,7 +1243,7 @@ static struct df_problem problem_LR = df_lr_verify_solution_start,/* Incremental solution verify start. */ df_lr_verify_solution_end, /* Incremental solution verify end. */ NULL, /* Dependent problem. */ - TV_DF_LR, /* Timing variable. */ + TV_DF_LR, /* Timing variable. */ false /* Reset blocks on dropping out of blocks_to_analyze. */ }; @@ -1313,7 +1294,7 @@ df_lr_verify_transfer_functions (void) /* Make a copy of the transfer functions and then compute new ones to see if the transfer functions have changed. */ - if (!bitmap_bit_p (df_lr->out_of_date_transfer_functions, + if (!bitmap_bit_p (df_lr->out_of_date_transfer_functions, bb->index)) { bitmap_copy (saved_def, bb_info->def); @@ -1331,7 +1312,7 @@ df_lr_verify_transfer_functions (void) /* If we do not have basic block info, the block must be in the list of dirty blocks or else some one has added a block behind our backs. */ - gcc_assert (bitmap_bit_p (df_lr->out_of_date_transfer_functions, + gcc_assert (bitmap_bit_p (df_lr->out_of_date_transfer_functions, bb->index)); } /* Make sure no one created a block without following @@ -1340,8 +1321,8 @@ df_lr_verify_transfer_functions (void) } /* Make sure there are no dirty bits in blocks that have been deleted. */ - gcc_assert (!bitmap_intersect_compl_p (df_lr->out_of_date_transfer_functions, - all_blocks)); + gcc_assert (!bitmap_intersect_compl_p (df_lr->out_of_date_transfer_functions, + all_blocks)); BITMAP_FREE (saved_def); BITMAP_FREE (saved_use); @@ -1368,7 +1349,7 @@ df_lr_verify_transfer_functions (void) Then, the in and out sets for the LIVE problem itself are computed. These are the logical AND of the IN and OUT sets from the LR problem - and the must-initialized problem. + and the must-initialized problem. ----------------------------------------------------------------------------*/ /* Private data used to verify the solution for this problem. */ @@ -1386,7 +1367,7 @@ static bitmap df_live_scratch; /* Set basic block info. */ static void -df_live_set_bb_info (unsigned int index, +df_live_set_bb_info (unsigned int index, struct df_live_bb_info *bb_info) { gcc_assert (df_live); @@ -1398,7 +1379,7 @@ df_live_set_bb_info (unsigned int index, /* Free basic block info. */ static void -df_live_free_bb_info (basic_block bb ATTRIBUTE_UNUSED, +df_live_free_bb_info (basic_block bb ATTRIBUTE_UNUSED, void *vbb_info) { struct df_live_bb_info *bb_info = (struct df_live_bb_info *) vbb_info; @@ -1416,14 +1397,14 @@ df_live_free_bb_info (basic_block bb ATTRIBUTE_UNUSED, /* Allocate or reset bitmaps for DF_LIVE blocks. The solution bits are not touched unless the block is new. */ -static void +static void df_live_alloc (bitmap all_blocks ATTRIBUTE_UNUSED) { unsigned int bb_index; bitmap_iterator bi; if (!df_live->block_pool) - df_live->block_pool = create_alloc_pool ("df_live_block pool", + df_live->block_pool = create_alloc_pool ("df_live_block pool", sizeof (struct df_live_bb_info), 100); if (!df_live_scratch) df_live_scratch = BITMAP_ALLOC (NULL); @@ -1434,12 +1415,12 @@ df_live_alloc (bitmap all_blocks ATTRIBUTE_UNUSED) { struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index); if (bb_info) - { + { bitmap_clear (bb_info->kill); bitmap_clear (bb_info->gen); } else - { + { bb_info = (struct df_live_bb_info *) pool_alloc (df_live->block_pool); df_live_set_bb_info (bb_index, bb_info); bb_info->kill = BITMAP_ALLOC (NULL); @@ -1454,7 +1435,7 @@ df_live_alloc (bitmap all_blocks ATTRIBUTE_UNUSED) /* Reset the global solution for recalculation. 
*/ -static void +static void df_live_reset (bitmap all_blocks) { unsigned int bb_index; @@ -1536,7 +1517,7 @@ df_live_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED) df_grow_insn_info (); - EXECUTE_IF_SET_IN_BITMAP (df_live->out_of_date_transfer_functions, + EXECUTE_IF_SET_IN_BITMAP (df_live->out_of_date_transfer_functions, 0, bb_index, bi) { df_live_bb_local_compute (bb_index); @@ -1548,7 +1529,7 @@ df_live_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED) /* Initialize the solution vectors. */ -static void +static void df_live_init (bitmap all_blocks) { unsigned int bb_index; @@ -1573,12 +1554,12 @@ df_live_confluence_n (edge e) { bitmap op1 = df_live_get_bb_info (e->dest->index)->in; bitmap op2 = df_live_get_bb_info (e->src->index)->out; - - if (e->flags & EDGE_FAKE) + + if (e->flags & EDGE_FAKE) return; bitmap_ior_into (op1, op2); -} +} /* Transfer function for the forwards must-initialized problem. */ @@ -1593,10 +1574,9 @@ df_live_transfer_function (int bb_index) bitmap gen = bb_info->gen; bitmap kill = bb_info->kill; - /* We need to use a scratch set here so that the value returned from - this function invocation properly reflects if the sets changed in - a significant way; i.e. not just because the lr set was anded - in. */ + /* We need to use a scratch set here so that the value returned from this + function invocation properly reflects whether the sets changed in a + significant way; i.e. not just because the lr set was anded in. */ bitmap_and (df_live_scratch, gen, bb_lr_info->out); /* No register may reach a location where it is not used. Thus we trim the rr result to the places where it is used. */ @@ -1621,13 +1601,13 @@ df_live_finalize (bitmap all_blocks) { struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index); struct df_live_bb_info *bb_live_info = df_live_get_bb_info (bb_index); - + /* No register may reach a location where it is not used. Thus we trim the rr result to the places where it is used. */ bitmap_and_into (bb_live_info->in, bb_lr_info->in); bitmap_and_into (bb_live_info->out, bb_lr_info->out); } - + df_live->solutions_dirty = false; } } @@ -1641,7 +1621,7 @@ df_live_free (void) if (df_live->block_info) { unsigned int i; - + for (i = 0; i < df_live->block_info_size; i++) { struct df_live_bb_info *bb_info = df_live_get_bb_info (i); @@ -1653,7 +1633,7 @@ df_live_free (void) BITMAP_FREE (bb_info->out); } } - + free_alloc_pool (df_live->block_pool); df_live->block_info_size = 0; free (df_live->block_info); @@ -1676,7 +1656,7 @@ df_live_top_dump (basic_block bb, FILE *file) if (!bb_info || !bb_info->in) return; - + fprintf (file, ";; live in \t"); df_print_regset (file, bb_info->in); if (df_live->problem_data) @@ -1702,7 +1682,7 @@ df_live_bottom_dump (basic_block bb, FILE *file) if (!bb_info || !bb_info->out) return; - + fprintf (file, ";; live out \t"); df_print_regset (file, bb_info->out); if (df_live->problem_data) @@ -1728,7 +1708,7 @@ df_live_verify_solution_start (void) return; } - /* Set it true so that the solution is recomputed. */ + /* Set it true so that the solution is recomputed. */ df_live->solutions_dirty = true; problem_data = XNEW (struct df_live_problem_data); @@ -1797,8 +1777,8 @@ static struct df_problem problem_LIVE = df_live_local_compute, /* Local compute function. */ df_live_init, /* Init the solution specific data. */ df_worklist_dataflow, /* Worklist solver. */ - NULL, /* Confluence operator 0. */ - df_live_confluence_n, /* Confluence operator n. */ + NULL, /* Confluence operator 0. 
*/ + df_live_confluence_n, /* Confluence operator n. */ df_live_transfer_function, /* Transfer function. */ df_live_finalize, /* Finalize function. */ df_live_free, /* Free all of the problem information. */ @@ -1836,7 +1816,7 @@ df_live_set_all_dirty (void) { basic_block bb; FOR_ALL_BB (bb) - bitmap_set_bit (df_live->out_of_date_transfer_functions, + bitmap_set_bit (df_live->out_of_date_transfer_functions, bb->index); } @@ -1871,7 +1851,7 @@ df_live_verify_transfer_functions (void) /* Make a copy of the transfer functions and then compute new ones to see if the transfer functions have changed. */ - if (!bitmap_bit_p (df_live->out_of_date_transfer_functions, + if (!bitmap_bit_p (df_live->out_of_date_transfer_functions, bb->index)) { bitmap_copy (saved_gen, bb_info->gen); @@ -1889,7 +1869,7 @@ df_live_verify_transfer_functions (void) /* If we do not have basic block info, the block must be in the list of dirty blocks or else some one has added a block behind our backs. */ - gcc_assert (bitmap_bit_p (df_live->out_of_date_transfer_functions, + gcc_assert (bitmap_bit_p (df_live->out_of_date_transfer_functions, bb->index)); } /* Make sure no one created a block without following @@ -1898,8 +1878,8 @@ df_live_verify_transfer_functions (void) } /* Make sure there are no dirty bits in blocks that have been deleted. */ - gcc_assert (!bitmap_intersect_compl_p (df_live->out_of_date_transfer_functions, - all_blocks)); + gcc_assert (!bitmap_intersect_compl_p (df_live->out_of_date_transfer_functions, + all_blocks)); BITMAP_FREE (saved_gen); BITMAP_FREE (saved_kill); BITMAP_FREE (all_blocks); @@ -1925,7 +1905,7 @@ df_chain_create (df_ref src, df_ref dst) { struct df_link *head = DF_REF_CHAIN (src); struct df_link *link = (struct df_link *) pool_alloc (df_chain->block_pool); - + DF_REF_CHAIN (src) = link; link->next = head; link->ref = dst; @@ -1934,7 +1914,7 @@ df_chain_create (df_ref src, df_ref dst) /* Delete any du or ud chains that start at REF and point to - TARGET. */ + TARGET. */ static void df_chain_unlink_1 (df_ref ref, df_ref target) { @@ -1977,10 +1957,10 @@ df_chain_unlink (df_ref ref) /* Copy the du or ud chain starting at FROM_REF and attach it to - TO_REF. */ + TO_REF. */ -void -df_chain_copy (df_ref to_ref, +void +df_chain_copy (df_ref to_ref, struct df_link *from_ref) { while (from_ref) @@ -1999,7 +1979,7 @@ df_chain_remove_problem (void) bitmap_iterator bi; unsigned int bb_index; - /* Wholesale destruction of the old chains. */ + /* Wholesale destruction of the old chains. */ if (df_chain->block_pool) free_alloc_pool (df_chain->block_pool); @@ -2016,11 +1996,11 @@ df_chain_remove_problem (void) if (df_chain_problem_p (DF_UD_CHAIN)) for (use_rec = df_get_artificial_uses (bb->index); *use_rec; use_rec++) DF_REF_CHAIN (*use_rec) = NULL; - + FOR_BB_INSNS (bb, insn) { unsigned int uid = INSN_UID (insn); - + if (INSN_P (insn)) { if (df_chain_problem_p (DF_DU_CHAIN)) @@ -2055,11 +2035,11 @@ df_chain_fully_remove_problem (void) /* Create def-use or use-def chains. 
*/ -static void +static void df_chain_alloc (bitmap all_blocks ATTRIBUTE_UNUSED) { df_chain_remove_problem (); - df_chain->block_pool = create_alloc_pool ("df_chain_block pool", + df_chain->block_pool = create_alloc_pool ("df_chain_block pool", sizeof (struct df_link), 50); df_chain->optional_p = true; } @@ -2083,7 +2063,7 @@ df_chain_create_bb_process_use (bitmap local_rd, { bitmap_iterator bi; unsigned int def_index; - + while (*use_rec) { df_ref use = *use_rec; @@ -2099,13 +2079,13 @@ df_chain_create_bb_process_use (bitmap local_rd, { unsigned int first_index = DF_DEFS_BEGIN (uregno); unsigned int last_index = first_index + count - 1; - + EXECUTE_IF_SET_IN_BITMAP (local_rd, first_index, def_index, bi) { df_ref def; - if (def_index > last_index) + if (def_index > last_index) break; - + def = DF_DEFS_GET (def_index); if (df_chain_problem_p (DF_DU_CHAIN)) df_chain_create (def, use); @@ -2140,16 +2120,16 @@ df_chain_create_bb (unsigned int bb_index) #ifdef EH_USES /* Create the chains for the artificial uses from the EH_USES at the beginning of the block. */ - + /* Artificials are only hard regs. */ if (!(df->changeable_flags & DF_NO_HARD_REGS)) df_chain_create_bb_process_use (cpy, - df_get_artificial_uses (bb->index), + df_get_artificial_uses (bb->index), DF_REF_AT_TOP); #endif df_rd_simulate_artificial_defs_at_top (bb, cpy); - + /* Process the regular instructions next. */ FOR_BB_INSNS (bb, insn) if (INSN_P (insn)) @@ -2170,7 +2150,7 @@ df_chain_create_bb (unsigned int bb_index) at the end of the block. */ if (!(df->changeable_flags & DF_NO_HARD_REGS)) df_chain_create_bb_process_use (cpy, - df_get_artificial_uses (bb->index), + df_get_artificial_uses (bb->index), 0); BITMAP_FREE (cpy); @@ -2184,7 +2164,7 @@ df_chain_finalize (bitmap all_blocks) { unsigned int bb_index; bitmap_iterator bi; - + EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi) { df_chain_create_bb (bb_index); @@ -2214,7 +2194,7 @@ df_chain_top_dump (basic_block bb, FILE *file) df_ref *def_rec = df_get_artificial_defs (bb->index); if (*def_rec) { - + fprintf (file, ";; DU chains for artificial defs\n"); while (*def_rec) { @@ -2224,7 +2204,7 @@ df_chain_top_dump (basic_block bb, FILE *file) fprintf (file, "\n"); def_rec++; } - } + } FOR_BB_INSNS (bb, insn) { @@ -2234,9 +2214,9 @@ df_chain_top_dump (basic_block bb, FILE *file) def_rec = DF_INSN_INFO_DEFS (insn_info); if (*def_rec) { - fprintf (file, ";; DU chains for insn luid %d uid %d\n", + fprintf (file, ";; DU chains for insn luid %d uid %d\n", DF_INSN_INFO_LUID (insn_info), INSN_UID (insn)); - + while (*def_rec) { df_ref def = *def_rec; @@ -2273,7 +2253,7 @@ df_chain_bottom_dump (basic_block bb, FILE *file) fprintf (file, "\n"); use_rec++; } - } + } FOR_BB_INSNS (bb, insn) { @@ -2284,9 +2264,9 @@ df_chain_bottom_dump (basic_block bb, FILE *file) use_rec = DF_INSN_INFO_USES (insn_info); if (*use_rec || *eq_use_rec) { - fprintf (file, ";; UD chains for insn luid %d uid %d\n", + fprintf (file, ";; UD chains for insn luid %d uid %d\n", DF_INSN_INFO_LUID (insn_info), INSN_UID (insn)); - + while (*use_rec) { df_ref use = *use_rec; @@ -2322,8 +2302,8 @@ static struct df_problem problem_CHAIN = NULL, /* Local compute function. */ NULL, /* Init the solution specific data. */ NULL, /* Iterative solver. */ - NULL, /* Confluence operator 0. */ - NULL, /* Confluence operator n. */ + NULL, /* Confluence operator 0. */ + NULL, /* Confluence operator n. */ NULL, /* Transfer function. */ df_chain_finalize, /* Finalize function. 
*/ df_chain_free, /* Free all of the problem information. */ @@ -2396,8 +2376,8 @@ struct df_byte_lr_problem_data bitmap needs_expansion; /* The start position and len for each regno in the various bit - vectors. */ - unsigned int* regno_start; + vectors. */ + unsigned int* regno_start; unsigned int* regno_len; /* An obstack for the bitmaps we need for this problem. */ bitmap_obstack byte_lr_bitmaps; @@ -2406,10 +2386,10 @@ struct df_byte_lr_problem_data /* Get the starting location for REGNO in the df_byte_lr bitmaps. */ -int +int df_byte_lr_get_regno_start (unsigned int regno) { - struct df_byte_lr_problem_data *problem_data + struct df_byte_lr_problem_data *problem_data = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;; return problem_data->regno_start[regno]; } @@ -2417,10 +2397,10 @@ df_byte_lr_get_regno_start (unsigned int regno) /* Get the len for REGNO in the df_byte_lr bitmaps. */ -int +int df_byte_lr_get_regno_len (unsigned int regno) -{ - struct df_byte_lr_problem_data *problem_data +{ + struct df_byte_lr_problem_data *problem_data = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data;; return problem_data->regno_len[regno]; } @@ -2429,7 +2409,7 @@ df_byte_lr_get_regno_len (unsigned int regno) /* Set basic block info. */ static void -df_byte_lr_set_bb_info (unsigned int index, +df_byte_lr_set_bb_info (unsigned int index, struct df_byte_lr_bb_info *bb_info) { gcc_assert (df_byte_lr); @@ -2437,11 +2417,11 @@ df_byte_lr_set_bb_info (unsigned int index, df_byte_lr->block_info[index] = bb_info; } - + /* Free basic block info. */ static void -df_byte_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED, +df_byte_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED, void *vbb_info) { struct df_byte_lr_bb_info *bb_info = (struct df_byte_lr_bb_info *) vbb_info; @@ -2462,14 +2442,14 @@ df_byte_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED, static void df_byte_lr_check_regs (df_ref *ref_rec) { - struct df_byte_lr_problem_data *problem_data + struct df_byte_lr_problem_data *problem_data = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data; for (; *ref_rec; ref_rec++) { df_ref ref = *ref_rec; - if (DF_REF_FLAGS_IS_SET (ref, DF_REF_SIGN_EXTRACT - | DF_REF_ZERO_EXTRACT + if (DF_REF_FLAGS_IS_SET (ref, DF_REF_SIGN_EXTRACT + | DF_REF_ZERO_EXTRACT | DF_REF_STRICT_LOW_PART) || GET_CODE (DF_REF_REG (ref)) == SUBREG) bitmap_set_bit (problem_data->needs_expansion, DF_REF_REGNO (ref)); @@ -2477,13 +2457,13 @@ df_byte_lr_check_regs (df_ref *ref_rec) } -/* Expand bitmap SRC which is indexed by regno to DEST which is indexed by +/* Expand bitmap SRC which is indexed by regno to DEST which is indexed by regno_start and regno_len. */ static void df_byte_lr_expand_bitmap (bitmap dest, bitmap src) { - struct df_byte_lr_problem_data *problem_data + struct df_byte_lr_problem_data *problem_data = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data; bitmap_iterator bi; unsigned int i; @@ -2491,7 +2471,7 @@ df_byte_lr_expand_bitmap (bitmap dest, bitmap src) bitmap_clear (dest); EXECUTE_IF_SET_IN_BITMAP (src, 0, i, bi) { - bitmap_set_range (dest, problem_data->regno_start[i], + bitmap_set_range (dest, problem_data->regno_start[i], problem_data->regno_len[i]); } } @@ -2500,7 +2480,7 @@ df_byte_lr_expand_bitmap (bitmap dest, bitmap src) /* Allocate or reset bitmaps for DF_BYTE_LR blocks. The solution bits are not touched unless the block is new. 
*/ -static void +static void df_byte_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED) { unsigned int bb_index; @@ -2515,7 +2495,7 @@ df_byte_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED) df_byte_lr->problem_data = problem_data; if (!df_byte_lr->block_pool) - df_byte_lr->block_pool = create_alloc_pool ("df_byte_lr_block pool", + df_byte_lr->block_pool = create_alloc_pool ("df_byte_lr_block pool", sizeof (struct df_byte_lr_bb_info), 50); df_grow_bb_info (df_byte_lr); @@ -2532,7 +2512,7 @@ df_byte_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED) problem_data->hardware_regs_used = BITMAP_ALLOC (&problem_data->byte_lr_bitmaps); problem_data->invalidated_by_call = BITMAP_ALLOC (&problem_data->byte_lr_bitmaps); problem_data->needs_expansion = BITMAP_ALLOC (&problem_data->byte_lr_bitmaps); - + /* Discover which regno's use subregs, extracts or strict_low_parts. */ FOR_EACH_BB (bb) @@ -2552,7 +2532,7 @@ df_byte_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED) bitmap_set_bit (df_byte_lr->out_of_date_transfer_functions, ENTRY_BLOCK); bitmap_set_bit (df_byte_lr->out_of_date_transfer_functions, EXIT_BLOCK); - + /* Allocate the slots for each regno. */ for (regno = 0; regno < max_reg; regno++) { @@ -2560,28 +2540,28 @@ df_byte_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED) problem_data->regno_start[regno] = index; if (bitmap_bit_p (problem_data->needs_expansion, regno)) len = GET_MODE_SIZE (GET_MODE (regno_reg_rtx[regno])); - else + else len = 1; - + problem_data->regno_len[regno] = len; index += len; } - df_byte_lr_expand_bitmap (problem_data->hardware_regs_used, + df_byte_lr_expand_bitmap (problem_data->hardware_regs_used, df->hardware_regs_used); - df_byte_lr_expand_bitmap (problem_data->invalidated_by_call, + df_byte_lr_expand_bitmap (problem_data->invalidated_by_call, regs_invalidated_by_call_regset); EXECUTE_IF_SET_IN_BITMAP (df_byte_lr->out_of_date_transfer_functions, 0, bb_index, bi) { struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb_index); if (bb_info) - { + { bitmap_clear (bb_info->def); bitmap_clear (bb_info->use); } else - { + { bb_info = (struct df_byte_lr_bb_info *) pool_alloc (df_byte_lr->block_pool); df_byte_lr_set_bb_info (bb_index, bb_info); bb_info->use = BITMAP_ALLOC (&problem_data->byte_lr_bitmaps); @@ -2590,14 +2570,14 @@ df_byte_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED) bb_info->out = BITMAP_ALLOC (&problem_data->byte_lr_bitmaps); } } - + df_byte_lr->optional_p = true; } /* Reset the global solution for recalculation. */ -static void +static void df_byte_lr_reset (bitmap all_blocks) { unsigned int bb_index; @@ -2618,7 +2598,7 @@ df_byte_lr_reset (bitmap all_blocks) static void df_byte_lr_bb_local_compute (unsigned int bb_index) { - struct df_byte_lr_problem_data *problem_data + struct df_byte_lr_problem_data *problem_data = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data; basic_block bb = BASIC_BLOCK (bb_index); struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb_index); @@ -2659,7 +2639,7 @@ df_byte_lr_bb_local_compute (unsigned int bb_index) unsigned int uid = INSN_UID (insn); if (!INSN_P (insn)) - continue; + continue; for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++) { @@ -2720,7 +2700,7 @@ df_byte_lr_bb_local_compute (unsigned int bb_index) bitmap_clear_range (bb_info->use, start, len); } } - + #ifdef EH_USES /* Process the uses that are live into an exception handler. 
*/ for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++) @@ -2766,7 +2746,7 @@ df_byte_lr_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED) /* Initialize the solution vectors. */ -static void +static void df_byte_lr_init (bitmap all_blocks) { unsigned int bb_index; @@ -2787,12 +2767,12 @@ df_byte_lr_init (bitmap all_blocks) static void df_byte_lr_confluence_0 (basic_block bb) { - struct df_byte_lr_problem_data *problem_data + struct df_byte_lr_problem_data *problem_data = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data; bitmap op1 = df_byte_lr_get_bb_info (bb->index)->out; if (bb != EXIT_BLOCK_PTR) bitmap_copy (op1, problem_data->hardware_regs_used); -} +} /* Confluence function that ignores fake edges. */ @@ -2800,11 +2780,11 @@ df_byte_lr_confluence_0 (basic_block bb) static void df_byte_lr_confluence_n (edge e) { - struct df_byte_lr_problem_data *problem_data + struct df_byte_lr_problem_data *problem_data = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data; bitmap op1 = df_byte_lr_get_bb_info (e->src->index)->out; bitmap op2 = df_byte_lr_get_bb_info (e->dest->index)->in; - + /* Call-clobbered registers die across exception and call edges. */ /* ??? Abnormal call edges ignored for the moment, as this gets confused by sibling call edges, which crashes reg-stack. */ @@ -2814,7 +2794,7 @@ df_byte_lr_confluence_n (edge e) bitmap_ior_into (op1, op2); bitmap_ior_into (op1, problem_data->hardware_regs_used); -} +} /* Transfer function. */ @@ -2865,14 +2845,14 @@ df_byte_lr_top_dump (basic_block bb, FILE *file) struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb->index); if (!bb_info || !bb_info->in) return; - + fprintf (file, ";; blr in \t"); df_print_byte_regset (file, bb_info->in); fprintf (file, ";; blr use \t"); df_print_byte_regset (file, bb_info->use); fprintf (file, ";; blr def \t"); df_print_byte_regset (file, bb_info->def); -} +} /* Debugging info at bottom of bb. */ @@ -2883,10 +2863,10 @@ df_byte_lr_bottom_dump (basic_block bb, FILE *file) struct df_byte_lr_bb_info *bb_info = df_byte_lr_get_bb_info (bb->index); if (!bb_info || !bb_info->out) return; - + fprintf (file, ";; blr out \t"); df_print_byte_regset (file, bb_info->out); -} +} /* All of the information associated with every instance of the problem. */ @@ -2901,8 +2881,8 @@ static struct df_problem problem_BYTE_LR = df_byte_lr_local_compute, /* Local compute function. */ df_byte_lr_init, /* Init the solution specific data. */ df_worklist_dataflow, /* Worklist solver. */ - df_byte_lr_confluence_0, /* Confluence operator 0. */ - df_byte_lr_confluence_n, /* Confluence operator n. */ + df_byte_lr_confluence_0, /* Confluence operator 0. */ + df_byte_lr_confluence_n, /* Confluence operator n. */ df_byte_lr_transfer_function, /* Transfer function. */ NULL, /* Finalize function. */ df_byte_lr_free, /* Free all of the problem information. */ @@ -2913,7 +2893,7 @@ static struct df_problem problem_BYTE_LR = NULL, /* Incremental solution verify start. */ NULL, /* Incremental solution verify end. */ NULL, /* Dependent problem. */ - TV_DF_BYTE_LR, /* Timing variable. */ + TV_DF_BYTE_LR, /* Timing variable. */ false /* Reset blocks on dropping out of blocks_to_analyze. 
*/ }; @@ -2937,7 +2917,7 @@ df_byte_lr_add_problem (void) void df_byte_lr_simulate_defs (rtx insn, bitmap live) { - struct df_byte_lr_problem_data *problem_data + struct df_byte_lr_problem_data *problem_data = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data; df_ref *def_rec; unsigned int uid = INSN_UID (insn); @@ -2965,15 +2945,15 @@ df_byte_lr_simulate_defs (rtx insn, bitmap live) bitmap_clear_range (live, start, len); } } -} +} /* Simulate the effects of the uses of INSN on LIVE. */ -void +void df_byte_lr_simulate_uses (rtx insn, bitmap live) { - struct df_byte_lr_problem_data *problem_data + struct df_byte_lr_problem_data *problem_data = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data; df_ref *use_rec; unsigned int uid = INSN_UID (insn); @@ -2986,13 +2966,13 @@ df_byte_lr_simulate_uses (rtx insn, bitmap live) unsigned int len = problem_data->regno_len[uregno]; unsigned int sb; unsigned int lb; - + if (!df_compute_accessed_bytes (use, DF_MM_MAY, &sb, &lb)) { start += sb; len = lb - sb; } - + /* Add use to set of uses in this BB. */ if (len) bitmap_set_range (live, start, len); @@ -3003,17 +2983,17 @@ df_byte_lr_simulate_uses (rtx insn, bitmap live) /* Apply the artificial uses and defs at the top of BB in a forwards direction. */ -void +void df_byte_lr_simulate_artificial_refs_at_top (basic_block bb, bitmap live) { - struct df_byte_lr_problem_data *problem_data + struct df_byte_lr_problem_data *problem_data = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data; df_ref *def_rec; #ifdef EH_USES df_ref *use_rec; #endif int bb_index = bb->index; - + #ifdef EH_USES for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++) { @@ -3032,7 +3012,7 @@ df_byte_lr_simulate_artificial_refs_at_top (basic_block bb, bitmap live) { df_ref def = *def_rec; if (DF_REF_FLAGS (def) & DF_REF_AT_TOP) - { + { unsigned int dregno = DF_REF_REGNO (def); unsigned int start = problem_data->regno_start[dregno]; unsigned int len = problem_data->regno_len[dregno]; @@ -3045,15 +3025,15 @@ df_byte_lr_simulate_artificial_refs_at_top (basic_block bb, bitmap live) /* Apply the artificial uses and defs at the end of BB in a backwards direction. */ -void +void df_byte_lr_simulate_artificial_refs_at_end (basic_block bb, bitmap live) { - struct df_byte_lr_problem_data *problem_data + struct df_byte_lr_problem_data *problem_data = (struct df_byte_lr_problem_data *)df_byte_lr->problem_data; df_ref *def_rec; df_ref *use_rec; int bb_index = bb->index; - + for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++) { df_ref def = *def_rec; @@ -3085,14 +3065,14 @@ df_byte_lr_simulate_artificial_refs_at_end (basic_block bb, bitmap live) This problem computes REG_DEAD and REG_UNUSED notes. ----------------------------------------------------------------------------*/ -static void +static void df_note_alloc (bitmap all_blocks ATTRIBUTE_UNUSED) { df_note->optional_p = true; } #ifdef REG_DEAD_DEBUGGING -static void +static void df_print_note (const char *prefix, rtx insn, rtx note) { if (dump_file) @@ -3110,14 +3090,14 @@ df_print_note (const char *prefix, rtx insn, rtx note) just leave the notes alone. 
*/ #ifdef STACK_REGS -static inline bool +static inline bool df_ignore_stack_reg (int regno) { return regstack_completed && IN_RANGE (regno, FIRST_STACK_REG, LAST_STACK_REG); } #else -static inline bool +static inline bool df_ignore_stack_reg (int regno ATTRIBUTE_UNUSED) { return false; @@ -3141,7 +3121,7 @@ df_kill_notes (rtx insn, rtx *old_dead_notes, rtx *old_unused_notes) switch (REG_NOTE_KIND (link)) { case REG_DEAD: - /* After reg-stack, we need to ignore any unused notes + /* After reg-stack, we need to ignore any unused notes for the stack registers. */ if (df_ignore_stack_reg (REGNO (XEXP (link, 0)))) { @@ -3161,7 +3141,7 @@ df_kill_notes (rtx insn, rtx *old_dead_notes, rtx *old_unused_notes) break; case REG_UNUSED: - /* After reg-stack, we need to ignore any unused notes + /* After reg-stack, we need to ignore any unused notes for the stack registers. */ if (df_ignore_stack_reg (REGNO (XEXP (link, 0)))) { @@ -3179,7 +3159,7 @@ df_kill_notes (rtx insn, rtx *old_dead_notes, rtx *old_unused_notes) *pprev = link = next; } break; - + default: pprev = &XEXP (link, 1); link = *pprev; @@ -3201,6 +3181,8 @@ df_set_note (enum reg_note note_type, rtx insn, rtx old, rtx reg) rtx curr = old; rtx prev = NULL; + gcc_assert (!DEBUG_INSN_P (insn)); + while (curr) if (XEXP (curr, 0) == reg) { @@ -3217,7 +3199,7 @@ df_set_note (enum reg_note note_type, rtx insn, rtx old, rtx reg) prev = curr; curr = XEXP (curr, 1); } - + /* Did not find the note. */ add_reg_note (insn, note_type, reg); return old; @@ -3258,14 +3240,14 @@ df_whole_mw_reg_unused_p (struct df_mw_hardreg *mws, static rtx df_set_unused_notes_for_mw (rtx insn, rtx old, struct df_mw_hardreg *mws, - bitmap live, bitmap do_not_gen, + bitmap live, bitmap do_not_gen, bitmap artificial_uses) { unsigned int r; - + #ifdef REG_DEAD_DEBUGGING if (dump_file) - fprintf (dump_file, "mw_set_unused looking at mws[%d..%d]\n", + fprintf (dump_file, "mw_set_unused looking at mws[%d..%d]\n", mws->start_regno, mws->end_regno); #endif @@ -3333,14 +3315,17 @@ df_whole_mw_reg_dead_p (struct df_mw_hardreg *mws, static rtx df_set_dead_notes_for_mw (rtx insn, rtx old, struct df_mw_hardreg *mws, bitmap live, bitmap do_not_gen, - bitmap artificial_uses) + bitmap artificial_uses, bool *added_notes_p) { unsigned int r; - + bool is_debug = *added_notes_p; + + *added_notes_p = false; + #ifdef REG_DEAD_DEBUGGING if (dump_file) { - fprintf (dump_file, "mw_set_dead looking at mws[%d..%d]\n do_not_gen =", + fprintf (dump_file, "mw_set_dead looking at mws[%d..%d]\n do_not_gen =", mws->start_regno, mws->end_regno); df_print_regset (dump_file, do_not_gen); fprintf (dump_file, " live ="); @@ -3353,6 +3338,11 @@ df_set_dead_notes_for_mw (rtx insn, rtx old, struct df_mw_hardreg *mws, if (df_whole_mw_reg_dead_p (mws, live, artificial_uses, do_not_gen)) { /* Add a dead note for the entire multi word register. */ + if (is_debug) + { + *added_notes_p = true; + return old; + } old = df_set_note (REG_DEAD, insn, old, mws->mw_reg); #ifdef REG_DEAD_DEBUGGING df_print_note ("adding 1: ", insn, REG_NOTES (insn)); @@ -3365,6 +3355,11 @@ df_set_dead_notes_for_mw (rtx insn, rtx old, struct df_mw_hardreg *mws, && !bitmap_bit_p (artificial_uses, r) && !bitmap_bit_p (do_not_gen, r)) { + if (is_debug) + { + *added_notes_p = true; + return old; + } old = df_set_note (REG_DEAD, insn, old, regno_reg_rtx[r]); #ifdef REG_DEAD_DEBUGGING df_print_note ("adding 2: ", insn, REG_NOTES (insn)); @@ -3379,11 +3374,11 @@ df_set_dead_notes_for_mw (rtx insn, rtx old, struct df_mw_hardreg *mws, LIVE. 
Do not generate notes for registers in ARTIFICIAL_USES. */ static rtx -df_create_unused_note (rtx insn, rtx old, df_ref def, +df_create_unused_note (rtx insn, rtx old, df_ref def, bitmap live, bitmap artificial_uses) { unsigned int dregno = DF_REF_REGNO (def); - + #ifdef REG_DEAD_DEBUGGING if (dump_file) { @@ -3397,30 +3392,185 @@ df_create_unused_note (rtx insn, rtx old, df_ref def, || bitmap_bit_p (artificial_uses, dregno) || df_ignore_stack_reg (dregno))) { - rtx reg = (DF_REF_LOC (def)) + rtx reg = (DF_REF_LOC (def)) ? *DF_REF_REAL_LOC (def): DF_REF_REG (def); old = df_set_note (REG_UNUSED, insn, old, reg); #ifdef REG_DEAD_DEBUGGING df_print_note ("adding 3: ", insn, REG_NOTES (insn)); #endif } - + return old; } +/* Node of a linked list of uses of dead REGs in debug insns. */ +struct dead_debug_use +{ + df_ref use; + struct dead_debug_use *next; +}; + +/* Linked list of the above, with a bitmap of the REGs in the + list. */ +struct dead_debug +{ + struct dead_debug_use *head; + bitmap used; + bitmap to_rescan; +}; + +/* Initialize DEBUG to an empty list, and clear USED, if given. */ +static inline void +dead_debug_init (struct dead_debug *debug, bitmap used) +{ + debug->head = NULL; + debug->used = used; + debug->to_rescan = NULL; + if (used) + bitmap_clear (used); +} + +/* Reset all debug insns with pending uses. Release the bitmap in it, + unless it is USED. USED must be the same bitmap passed to + dead_debug_init. */ +static inline void +dead_debug_finish (struct dead_debug *debug, bitmap used) +{ + struct dead_debug_use *head; + rtx insn = NULL; + + if (debug->used != used) + BITMAP_FREE (debug->used); + + while ((head = debug->head)) + { + insn = DF_REF_INSN (head->use); + if (!head->next || DF_REF_INSN (head->next->use) != insn) + { + INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC (); + df_insn_rescan_debug_internal (insn); + if (debug->to_rescan) + bitmap_clear_bit (debug->to_rescan, INSN_UID (insn)); + } + debug->head = head->next; + XDELETE (head); + } + + if (debug->to_rescan) + { + bitmap_iterator bi; + unsigned int uid; + + EXECUTE_IF_SET_IN_BITMAP (debug->to_rescan, 0, uid, bi) + { + struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid); + if (insn_info) + df_insn_rescan (insn_info->insn); + } + BITMAP_FREE (debug->to_rescan); + } +} + +/* Add USE to DEBUG. It must be a dead reference to UREGNO in a debug + insn. Create a bitmap for DEBUG as needed. */ +static inline void +dead_debug_add (struct dead_debug *debug, df_ref use, unsigned int uregno) +{ + struct dead_debug_use *newddu = XNEW (struct dead_debug_use); + + newddu->use = use; + newddu->next = debug->head; + debug->head = newddu; + + if (!debug->used) + debug->used = BITMAP_ALLOC (NULL); + + bitmap_set_bit (debug->used, uregno); +} + +/* If UREGNO is referenced by any entry in DEBUG, emit a debug insn + before INSN that binds the REG to a debug temp, and replace all + uses of UREGNO in DEBUG with uses of the debug temp. INSN must be + the insn where UREGNO dies. */ +static inline void +dead_debug_insert_before (struct dead_debug *debug, unsigned int uregno, + rtx insn) +{ + struct dead_debug_use **tailp = &debug->head; + struct dead_debug_use *cur; + struct dead_debug_use *uses = NULL; + struct dead_debug_use **usesp = &uses; + rtx reg = NULL; + rtx dval; + rtx bind; + + if (!debug->used || !bitmap_clear_bit (debug->used, uregno)) + return; + + /* Move all uses of uregno from debug->head to uses, setting mode to + the widest referenced mode. 
*/ + while ((cur = *tailp)) + { + if (DF_REF_REGNO (cur->use) == uregno) + { + *usesp = cur; + usesp = &cur->next; + *tailp = cur->next; + cur->next = NULL; + if (!reg + || (GET_MODE_BITSIZE (GET_MODE (reg)) + < GET_MODE_BITSIZE (GET_MODE (*DF_REF_REAL_LOC (cur->use))))) + reg = *DF_REF_REAL_LOC (cur->use); + } + else + tailp = &(*tailp)->next; + } + + gcc_assert (reg); + + /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */ + dval = make_debug_expr_from_rtl (reg); + + /* Emit a debug bind insn before the insn in which reg dies. */ + bind = gen_rtx_VAR_LOCATION (GET_MODE (reg), + DEBUG_EXPR_TREE_DECL (dval), reg, + VAR_INIT_STATUS_INITIALIZED); + + bind = emit_debug_insn_before (bind, insn); + df_insn_rescan (bind); + + /* Adjust all uses. */ + while ((cur = uses)) + { + if (GET_MODE (*DF_REF_REAL_LOC (cur->use)) == GET_MODE (reg)) + *DF_REF_REAL_LOC (cur->use) = dval; + else + *DF_REF_REAL_LOC (cur->use) + = gen_lowpart_SUBREG (GET_MODE (*DF_REF_REAL_LOC (cur->use)), dval); + /* ??? Should we simplify subreg of subreg? */ + if (debug->to_rescan == NULL) + debug->to_rescan = BITMAP_ALLOC (NULL); + bitmap_set_bit (debug->to_rescan, INSN_UID (DF_REF_INSN (cur->use))); + uses = cur->next; + XDELETE (cur); + } +} /* Recompute the REG_DEAD and REG_UNUSED notes and compute register info: lifetime, bb, and number of defs and uses for basic block BB. The three bitvectors are scratch regs used here. */ static void -df_note_bb_compute (unsigned int bb_index, +df_note_bb_compute (unsigned int bb_index, bitmap live, bitmap do_not_gen, bitmap artificial_uses) { basic_block bb = BASIC_BLOCK (bb_index); rtx insn; df_ref *def_rec; df_ref *use_rec; + struct dead_debug debug; + + dead_debug_init (&debug, NULL); bitmap_copy (live, df_get_live_out (bb)); bitmap_clear (artificial_uses); @@ -3454,13 +3604,13 @@ df_note_bb_compute (unsigned int bb_index, { unsigned int regno = DF_REF_REGNO (use); bitmap_set_bit (live, regno); - + /* Notes are not generated for any of the artificial registers at the bottom of the block. 
*/ bitmap_set_bit (artificial_uses, regno); } } - + #ifdef REG_DEAD_DEBUGGING if (dump_file) { @@ -3475,10 +3625,13 @@ df_note_bb_compute (unsigned int bb_index, struct df_mw_hardreg **mws_rec; rtx old_dead_notes; rtx old_unused_notes; - + int debug_insn; + if (!INSN_P (insn)) continue; + debug_insn = DEBUG_INSN_P (insn); + bitmap_clear (do_not_gen); df_kill_notes (insn, &old_dead_notes, &old_unused_notes); @@ -3497,12 +3650,12 @@ df_note_bb_compute (unsigned int bb_index, mws_rec = DF_INSN_UID_MWS (uid); while (*mws_rec) { - struct df_mw_hardreg *mws = *mws_rec; - if ((DF_MWS_REG_DEF_P (mws)) + struct df_mw_hardreg *mws = *mws_rec; + if ((DF_MWS_REG_DEF_P (mws)) && !df_ignore_stack_reg (mws->start_regno)) - old_unused_notes - = df_set_unused_notes_for_mw (insn, old_unused_notes, - mws, live, do_not_gen, + old_unused_notes + = df_set_unused_notes_for_mw (insn, old_unused_notes, + mws, live, do_not_gen, artificial_uses); mws_rec++; } @@ -3516,7 +3669,7 @@ df_note_bb_compute (unsigned int bb_index, if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)) { old_unused_notes - = df_create_unused_note (insn, old_unused_notes, + = df_create_unused_note (insn, old_unused_notes, def, live, artificial_uses); bitmap_set_bit (do_not_gen, dregno); } @@ -3531,11 +3684,11 @@ df_note_bb_compute (unsigned int bb_index, mws_rec = DF_INSN_UID_MWS (uid); while (*mws_rec) { - struct df_mw_hardreg *mws = *mws_rec; + struct df_mw_hardreg *mws = *mws_rec; if (DF_MWS_REG_DEF_P (mws)) old_unused_notes - = df_set_unused_notes_for_mw (insn, old_unused_notes, - mws, live, do_not_gen, + = df_set_unused_notes_for_mw (insn, old_unused_notes, + mws, live, do_not_gen, artificial_uses); mws_rec++; } @@ -3545,7 +3698,7 @@ df_note_bb_compute (unsigned int bb_index, df_ref def = *def_rec; unsigned int dregno = DF_REF_REGNO (def); old_unused_notes - = df_create_unused_note (insn, old_unused_notes, + = df_create_unused_note (insn, old_unused_notes, def, live, artificial_uses); if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)) @@ -3555,18 +3708,26 @@ df_note_bb_compute (unsigned int bb_index, bitmap_clear_bit (live, dregno); } } - + /* Process the uses. 
*/ mws_rec = DF_INSN_UID_MWS (uid); while (*mws_rec) { - struct df_mw_hardreg *mws = *mws_rec; - if ((DF_MWS_REG_DEF_P (mws)) + struct df_mw_hardreg *mws = *mws_rec; + if ((DF_MWS_REG_DEF_P (mws)) && !df_ignore_stack_reg (mws->start_regno)) - old_dead_notes - = df_set_dead_notes_for_mw (insn, old_dead_notes, - mws, live, do_not_gen, - artificial_uses); + { + bool really_add_notes = debug_insn != 0; + + old_dead_notes + = df_set_dead_notes_for_mw (insn, old_dead_notes, + mws, live, do_not_gen, + artificial_uses, + &really_add_notes); + + if (really_add_notes) + debug_insn = -1; + } mws_rec++; } @@ -3576,7 +3737,7 @@ df_note_bb_compute (unsigned int bb_index, unsigned int uregno = DF_REF_REGNO (use); #ifdef REG_DEAD_DEBUGGING - if (dump_file) + if (dump_file && !debug_insn) { fprintf (dump_file, " regular looking at use "); df_ref_debug (use, dump_file); @@ -3584,13 +3745,25 @@ df_note_bb_compute (unsigned int bb_index, #endif if (!bitmap_bit_p (live, uregno)) { + if (debug_insn) + { + if (debug_insn > 0) + { + dead_debug_add (&debug, use, uregno); + continue; + } + break; + } + else + dead_debug_insert_before (&debug, uregno, insn); + if ( (!(DF_REF_FLAGS (use) & DF_REF_MW_HARDREG)) && (!bitmap_bit_p (do_not_gen, uregno)) && (!bitmap_bit_p (artificial_uses, uregno)) && (!(DF_REF_FLAGS (use) & DF_REF_READ_WRITE)) && (!df_ignore_stack_reg (uregno))) { - rtx reg = (DF_REF_LOC (use)) + rtx reg = (DF_REF_LOC (use)) ? *DF_REF_REAL_LOC (use) : DF_REF_REG (use); old_dead_notes = df_set_note (REG_DEAD, insn, old_dead_notes, reg); @@ -3615,7 +3788,17 @@ df_note_bb_compute (unsigned int bb_index, free_EXPR_LIST_node (old_dead_notes); old_dead_notes = next; } + + if (debug_insn == -1) + { + /* ??? We could probably do better here, replacing dead + registers with their definitions. */ + INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC (); + df_insn_rescan_debug_internal (insn); + } } + + dead_debug_finish (&debug, NULL); } @@ -3666,8 +3849,8 @@ static struct df_problem problem_NOTE = df_note_compute, /* Local compute function. */ NULL, /* Init the solution specific data. */ NULL, /* Iterative solver. */ - NULL, /* Confluence operator 0. */ - NULL, /* Confluence operator n. */ + NULL, /* Confluence operator 0. */ + NULL, /* Confluence operator n. */ NULL, /* Transfer function. */ NULL, /* Finalize function. */ df_note_free, /* Free all of the problem information. */ @@ -3697,13 +3880,15 @@ df_note_add_problem (void) /*---------------------------------------------------------------------------- - Functions for simulating the effects of single insns. + Functions for simulating the effects of single insns. You can either simulate in the forwards direction, starting from the top of a block or the backwards direction from the end of the - block. The main difference is that if you go forwards, the uses - are examined first then the defs, and if you go backwards, the defs - are examined first then the uses. + block. If you go backwards, defs are examined first to clear bits, + then uses are examined to set bits. If you go forwards, defs are + examined first to set bits, then REG_DEAD and REG_UNUSED notes + are examined to clear bits. In either case, the result of examining + a def can be undone (respectively by a use or a REG_UNUSED note). If you start at the top of the block, use one of DF_LIVE_IN or DF_LR_IN. 
If you start at the bottom of the block use one of @@ -3723,9 +3908,22 @@ df_simulate_find_defs (rtx insn, bitmap defs) for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++) { df_ref def = *def_rec; - /* If the def is to only part of the reg, it does - not kill the other defs that reach here. */ - if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))) + bitmap_set_bit (defs, DF_REF_REGNO (def)); + } +} + +/* Find the set of real DEFs, which are not clobbers, for INSN. */ + +void +df_simulate_find_noclobber_defs (rtx insn, bitmap defs) +{ + df_ref *def_rec; + unsigned int uid = INSN_UID (insn); + + for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++) + { + df_ref def = *def_rec; + if (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))) bitmap_set_bit (defs, DF_REF_REGNO (def)); } } @@ -3749,17 +3947,20 @@ df_simulate_defs (rtx insn, bitmap live) if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))) bitmap_clear_bit (live, dregno); } -} +} /* Simulate the effects of the uses of INSN on LIVE. */ -void +void df_simulate_uses (rtx insn, bitmap live) { df_ref *use_rec; unsigned int uid = INSN_UID (insn); + if (DEBUG_INSN_P (insn)) + return; + for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++) { df_ref use = *use_rec; @@ -3790,7 +3991,7 @@ df_simulate_fixup_sets (basic_block bb, bitmap live) df_simulate_initialize_backwards should be called first with a bitvector copyied from the DF_LIVE_OUT or DF_LR_OUT. Then df_simulate_one_insn_backwards should be called for each insn in - the block, starting with the last on. Finally, + the block, starting with the last one. Finally, df_simulate_finalize_backwards can be called to get a new value of the sets at the top of the block (this is rarely used). ----------------------------------------------------------------------------*/ @@ -3798,13 +3999,13 @@ df_simulate_fixup_sets (basic_block bb, bitmap live) /* Apply the artificial uses and defs at the end of BB in a backwards direction. */ -void +void df_simulate_initialize_backwards (basic_block bb, bitmap live) { df_ref *def_rec; df_ref *use_rec; int bb_index = bb->index; - + for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++) { df_ref def = *def_rec; @@ -3823,12 +4024,12 @@ df_simulate_initialize_backwards (basic_block bb, bitmap live) /* Simulate the backwards effects of INSN on the bitmap LIVE. */ -void +void df_simulate_one_insn_backwards (basic_block bb, rtx insn, bitmap live) { - if (! INSN_P (insn)) - return; - + if (!NONDEBUG_INSN_P (insn)) + return; + df_simulate_defs (insn, live); df_simulate_uses (insn, live); df_simulate_fixup_sets (bb, live); @@ -3838,7 +4039,7 @@ df_simulate_one_insn_backwards (basic_block bb, rtx insn, bitmap live) /* Apply the artificial uses and defs at the top of BB in a backwards direction. */ -void +void df_simulate_finalize_backwards (basic_block bb, bitmap live) { df_ref *def_rec; @@ -3846,7 +4047,7 @@ df_simulate_finalize_backwards (basic_block bb, bitmap live) df_ref *use_rec; #endif int bb_index = bb->index; - + for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++) { df_ref def = *def_rec; @@ -3866,47 +4067,51 @@ df_simulate_finalize_backwards (basic_block bb, bitmap live) /*---------------------------------------------------------------------------- The following three functions are used only for FORWARDS scanning: i.e. they process the defs and the REG_DEAD and REG_UNUSED notes. 
- Thus it is important to add the DF_NOTES problem to the stack of + Thus it is important to add the DF_NOTES problem to the stack of problems computed before using these functions. df_simulate_initialize_forwards should be called first with a bitvector copyied from the DF_LIVE_IN or DF_LR_IN. Then df_simulate_one_insn_forwards should be called for each insn in - the block, starting with the last on. Finally, - df_simulate_finalize_forwards can be called to get a new value - of the sets at the bottom of the block (this is rarely used). + the block, starting with the first one. ----------------------------------------------------------------------------*/ -/* Apply the artificial uses and defs at the top of BB in a backwards - direction. */ +/* Initialize the LIVE bitmap, which should be copied from DF_LIVE_IN or + DF_LR_IN for basic block BB, for forward scanning by marking artificial + defs live. */ -void +void df_simulate_initialize_forwards (basic_block bb, bitmap live) { df_ref *def_rec; int bb_index = bb->index; - + for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++) { df_ref def = *def_rec; if (DF_REF_FLAGS (def) & DF_REF_AT_TOP) - bitmap_clear_bit (live, DF_REF_REGNO (def)); + bitmap_set_bit (live, DF_REF_REGNO (def)); } } -/* Simulate the backwards effects of INSN on the bitmap LIVE. */ +/* Simulate the forwards effects of INSN on the bitmap LIVE. */ -void +void df_simulate_one_insn_forwards (basic_block bb, rtx insn, bitmap live) { rtx link; if (! INSN_P (insn)) - return; + return; - /* Make sure that the DF_NOTES really is an active df problem. */ + /* Make sure that DF_NOTE really is an active df problem. */ gcc_assert (df_note); - df_simulate_defs (insn, live); + /* Note that this is the opposite as how the problem is defined, because + in the LR problem defs _kill_ liveness. However, they do so backwards, + while here the scan is performed forwards! So, first assume that the + def is live, and if this is not true REG_UNUSED notes will rectify the + situation. */ + df_simulate_find_noclobber_defs (insn, live); /* Clear all of the registers that go dead. */ for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) @@ -3924,7 +4129,7 @@ df_simulate_one_insn_forwards (basic_block bb, rtx insn, bitmap live) while (--n >= 0) bitmap_clear_bit (live, regno + n); } - else + else bitmap_clear_bit (live, regno); } break; @@ -3936,19 +4141,464 @@ df_simulate_one_insn_forwards (basic_block bb, rtx insn, bitmap live) } -/* Apply the artificial uses and defs at the end of BB in a backwards - direction. */ + +/*---------------------------------------------------------------------------- + MULTIPLE DEFINITIONS + + Find the locations in the function reached by multiple definition sites + for a live pseudo. In and out bitvectors are built for each basic + block. They are restricted for efficiency to live registers. + + The gen and kill sets for the problem are obvious. Together they + include all defined registers in a basic block; the gen set includes + registers where a partial or conditional or may-clobber definition is + last in the BB, while the kill set includes registers with a complete + definition coming last. However, the computation of the dataflow + itself is interesting. + + The idea behind it comes from SSA form's iterated dominance frontier + criterion for inserting PHI functions. 
Just like in that case, we can use + the dominance frontier to find places where multiple definitions meet; + a register X defined in a basic block BB1 has multiple definitions in + basic blocks in BB1's dominance frontier. + + So, the in-set of a basic block BB2 is not just the union of the + out-sets of BB2's predecessors, but includes some more bits that come + from the basic blocks of whose dominance frontier BB2 is part (BB1 in + the previous paragraph). I called this set the init-set of BB2. + + (Note: I actually use the kill-set only to build the init-set. + gen bits are anyway propagated from BB1 to BB2 by dataflow). + + For example, if you have -void -df_simulate_finalize_forwards (basic_block bb, bitmap live) + BB1 : r10 = 0 + r11 = 0 + if <...> goto BB2 else goto BB3; + + BB2 : r10 = 1 + r12 = 1 + goto BB3; + + BB3 : + + you have BB3 in BB2's dominance frontier but not in BB1's, so that the + init-set of BB3 includes r10 and r12, but not r11. Note that we do + not need to iterate the dominance frontier, because we do not insert + anything like PHI functions there! Instead, dataflow will take care of + propagating the information to BB3's successors. + ---------------------------------------------------------------------------*/ + +/* Scratch var used by transfer functions. This is used to do md analysis + only for live registers. */ +static bitmap df_md_scratch; + +/* Set basic block info. */ + +static void +df_md_set_bb_info (unsigned int index, + struct df_md_bb_info *bb_info) +{ + gcc_assert (df_md); + gcc_assert (index < df_md->block_info_size); + df_md->block_info[index] = bb_info; +} + + +static void +df_md_free_bb_info (basic_block bb ATTRIBUTE_UNUSED, + void *vbb_info) +{ + struct df_md_bb_info *bb_info = (struct df_md_bb_info *) vbb_info; + if (bb_info) + { + BITMAP_FREE (bb_info->kill); + BITMAP_FREE (bb_info->gen); + BITMAP_FREE (bb_info->init); + BITMAP_FREE (bb_info->in); + BITMAP_FREE (bb_info->out); + pool_free (df_md->block_pool, bb_info); + } +} + + +/* Allocate or reset bitmaps for DF_MD. The solution bits are + not touched unless the block is new. */ + +static void +df_md_alloc (bitmap all_blocks) +{ + unsigned int bb_index; + bitmap_iterator bi; + + if (!df_md->block_pool) + df_md->block_pool = create_alloc_pool ("df_md_block pool", + sizeof (struct df_md_bb_info), 50); + + df_grow_bb_info (df_md); + df_md_scratch = BITMAP_ALLOC (NULL); + + EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi) + { + struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index); + if (bb_info) + { + bitmap_clear (bb_info->init); + bitmap_clear (bb_info->gen); + bitmap_clear (bb_info->kill); + bitmap_clear (bb_info->in); + bitmap_clear (bb_info->out); + } + else + { + bb_info = (struct df_md_bb_info *) pool_alloc (df_md->block_pool); + df_md_set_bb_info (bb_index, bb_info); + bb_info->init = BITMAP_ALLOC (NULL); + bb_info->gen = BITMAP_ALLOC (NULL); + bb_info->kill = BITMAP_ALLOC (NULL); + bb_info->in = BITMAP_ALLOC (NULL); + bb_info->out = BITMAP_ALLOC (NULL); + } + } + + df_md->optional_p = true; +} + +/* Add the effect of the top artificial defs of BB to the multiple definitions + bitmap LOCAL_MD. 
*/ + +void +df_md_simulate_artificial_defs_at_top (basic_block bb, bitmap local_md) { - df_ref *def_rec; int bb_index = bb->index; - + df_ref *def_rec; for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++) { df_ref def = *def_rec; - if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0) - bitmap_clear_bit (live, DF_REF_REGNO (def)); + if (DF_REF_FLAGS (def) & DF_REF_AT_TOP) + { + unsigned int dregno = DF_REF_REGNO (def); + if (DF_REF_FLAGS (def) + & (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER)) + bitmap_set_bit (local_md, dregno); + else + bitmap_clear_bit (local_md, dregno); + } + } +} + + +/* Add the effect of the defs of INSN to the reaching definitions bitmap + LOCAL_MD. */ + +void +df_md_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx insn, + bitmap local_md) +{ + unsigned uid = INSN_UID (insn); + df_ref *def_rec; + + for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++) + { + df_ref def = *def_rec; + unsigned int dregno = DF_REF_REGNO (def); + if ((!(df->changeable_flags & DF_NO_HARD_REGS)) + || (dregno >= FIRST_PSEUDO_REGISTER)) + { + if (DF_REF_FLAGS (def) + & (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER)) + bitmap_set_bit (local_md, DF_REF_ID (def)); + else + bitmap_clear_bit (local_md, DF_REF_ID (def)); + } + } +} + +static void +df_md_bb_local_compute_process_def (struct df_md_bb_info *bb_info, + df_ref *def_rec, + int top_flag) +{ + df_ref def; + bitmap_clear (seen_in_insn); + + while ((def = *def_rec++) != NULL) + { + unsigned int dregno = DF_REF_REGNO (def); + if (((!(df->changeable_flags & DF_NO_HARD_REGS)) + || (dregno >= FIRST_PSEUDO_REGISTER)) + && top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP)) + { + if (!bitmap_bit_p (seen_in_insn, dregno)) + { + if (DF_REF_FLAGS (def) + & (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER)) + { + bitmap_set_bit (bb_info->gen, dregno); + bitmap_clear_bit (bb_info->kill, dregno); + } + else + { + /* When we find a clobber and a regular def, + make sure the regular def wins. */ + bitmap_set_bit (seen_in_insn, dregno); + bitmap_set_bit (bb_info->kill, dregno); + bitmap_clear_bit (bb_info->gen, dregno); + } + } + } + } +} + + +/* Compute local multiple def info for basic block BB. */ + +static void +df_md_bb_local_compute (unsigned int bb_index) +{ + basic_block bb = BASIC_BLOCK (bb_index); + struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index); + rtx insn; + + /* Artificials are only hard regs. */ + if (!(df->changeable_flags & DF_NO_HARD_REGS)) + df_md_bb_local_compute_process_def (bb_info, + df_get_artificial_defs (bb_index), + DF_REF_AT_TOP); + + FOR_BB_INSNS (bb, insn) + { + unsigned int uid = INSN_UID (insn); + if (!INSN_P (insn)) + continue; + + df_md_bb_local_compute_process_def (bb_info, DF_INSN_UID_DEFS (uid), 0); + } + + if (!(df->changeable_flags & DF_NO_HARD_REGS)) + df_md_bb_local_compute_process_def (bb_info, + df_get_artificial_defs (bb_index), + 0); +} + +/* Compute local reaching def info for each basic block within BLOCKS. 
*/ + +static void +df_md_local_compute (bitmap all_blocks) +{ + unsigned int bb_index, df_bb_index; + bitmap_iterator bi1, bi2; + basic_block bb; + bitmap *frontiers; + + seen_in_insn = BITMAP_ALLOC (NULL); + + EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi1) + { + df_md_bb_local_compute (bb_index); + } + + BITMAP_FREE (seen_in_insn); + + frontiers = XNEWVEC (bitmap, last_basic_block); + FOR_ALL_BB (bb) + frontiers[bb->index] = BITMAP_ALLOC (NULL); + + compute_dominance_frontiers (frontiers); + + /* Add each basic block's kills to the nodes in the frontier of the BB. */ + EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi1) + { + bitmap kill = df_md_get_bb_info (bb_index)->kill; + EXECUTE_IF_SET_IN_BITMAP (frontiers[bb_index], 0, df_bb_index, bi2) + { + basic_block bb = BASIC_BLOCK (df_bb_index); + if (bitmap_bit_p (all_blocks, df_bb_index)) + bitmap_ior_and_into (df_md_get_bb_info (df_bb_index)->init, kill, + df_get_live_in (bb)); + } + } + + FOR_ALL_BB (bb) + BITMAP_FREE (frontiers[bb->index]); + free (frontiers); +} + + +/* Reset the global solution for recalculation. */ + +static void +df_md_reset (bitmap all_blocks) +{ + unsigned int bb_index; + bitmap_iterator bi; + + EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi) + { + struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index); + gcc_assert (bb_info); + bitmap_clear (bb_info->in); + bitmap_clear (bb_info->out); + } +} + +static bool +df_md_transfer_function (int bb_index) +{ + basic_block bb = BASIC_BLOCK (bb_index); + struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index); + bitmap in = bb_info->in; + bitmap out = bb_info->out; + bitmap gen = bb_info->gen; + bitmap kill = bb_info->kill; + + /* We need to use a scratch set here so that the value returned from this + function invocation properly reflects whether the sets changed in a + significant way; i.e. not just because the live set was anded in. */ + bitmap_and (df_md_scratch, gen, df_get_live_out (bb)); + + /* Multiple definitions of a register are not relevant if it is not + live. Thus we trim the result to the places where it is live. */ + bitmap_and_into (in, df_get_live_in (bb)); + + return bitmap_ior_and_compl (out, df_md_scratch, in, kill); +} + +/* Initialize the solution bit vectors for problem. */ + +static void +df_md_init (bitmap all_blocks) +{ + unsigned int bb_index; + bitmap_iterator bi; + + EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi) + { + struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index); + + bitmap_copy (bb_info->in, bb_info->init); + df_md_transfer_function (bb_index); + } +} + +static void +df_md_confluence_0 (basic_block bb) +{ + struct df_md_bb_info *bb_info = df_md_get_bb_info (bb->index); + bitmap_copy (bb_info->in, bb_info->init); +} + +/* In of target gets or of out of source. */ + +static void +df_md_confluence_n (edge e) +{ + bitmap op1 = df_md_get_bb_info (e->dest->index)->in; + bitmap op2 = df_md_get_bb_info (e->src->index)->out; + + if (e->flags & EDGE_FAKE) + return; + + if (e->flags & EDGE_EH) + bitmap_ior_and_compl_into (op1, op2, regs_invalidated_by_call_regset); + else + bitmap_ior_into (op1, op2); +} + +/* Free all storage associated with the problem. 
*/ + +static void +df_md_free (void) +{ + unsigned int i; + for (i = 0; i < df_md->block_info_size; i++) + { + struct df_md_bb_info *bb_info = df_md_get_bb_info (i); + if (bb_info) + { + BITMAP_FREE (bb_info->kill); + BITMAP_FREE (bb_info->gen); + BITMAP_FREE (bb_info->init); + BITMAP_FREE (bb_info->in); + BITMAP_FREE (bb_info->out); + } } + + BITMAP_FREE (df_md_scratch); + free_alloc_pool (df_md->block_pool); + + df_md->block_info_size = 0; + free (df_md->block_info); + free (df_md); +} + + +/* Debugging info at top of bb. */ + +static void +df_md_top_dump (basic_block bb, FILE *file) +{ + struct df_md_bb_info *bb_info = df_md_get_bb_info (bb->index); + if (!bb_info || !bb_info->in) + return; + + fprintf (file, ";; md in \t"); + df_print_regset (file, bb_info->in); + fprintf (file, ";; md init \t"); + df_print_regset (file, bb_info->init); + fprintf (file, ";; md gen \t"); + df_print_regset (file, bb_info->gen); + fprintf (file, ";; md kill \t"); + df_print_regset (file, bb_info->kill); +} + +/* Debugging info at bottom of bb. */ + +static void +df_md_bottom_dump (basic_block bb, FILE *file) +{ + struct df_md_bb_info *bb_info = df_md_get_bb_info (bb->index); + if (!bb_info || !bb_info->out) + return; + + fprintf (file, ";; md out \t"); + df_print_regset (file, bb_info->out); +} + +static struct df_problem problem_MD = +{ + DF_MD, /* Problem id. */ + DF_FORWARD, /* Direction. */ + df_md_alloc, /* Allocate the problem specific data. */ + df_md_reset, /* Reset global information. */ + df_md_free_bb_info, /* Free basic block info. */ + df_md_local_compute, /* Local compute function. */ + df_md_init, /* Init the solution specific data. */ + df_worklist_dataflow, /* Worklist solver. */ + df_md_confluence_0, /* Confluence operator 0. */ + df_md_confluence_n, /* Confluence operator n. */ + df_md_transfer_function, /* Transfer function. */ + NULL, /* Finalize function. */ + df_md_free, /* Free all of the problem information. */ + df_md_free, /* Remove this problem from the stack of dataflow problems. */ + NULL, /* Debugging. */ + df_md_top_dump, /* Debugging start block. */ + df_md_bottom_dump, /* Debugging end block. */ + NULL, /* Incremental solution verify start. */ + NULL, /* Incremental solution verify end. */ + NULL, /* Dependent problem. */ + TV_DF_MD, /* Timing variable. */ + false /* Reset blocks on dropping out of blocks_to_analyze. */ +}; + +/* Create a new MD instance and add it to the existing instance + of DF. */ + +void +df_md_add_problem (void) +{ + df_add_problem (&problem_MD); } + + +
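
As an illustration of how the interfaces added by this patch fit together, here is a minimal sketch of a consumer pass.  The two helper names (example_scan_block_forwards, example_scan_block_md) and the "..." per-insn hooks are hypothetical; everything else uses only functions introduced above or the existing df API (df_get_live_in, the bitmap macros), and df_md_get_bb_info is assumed to be exported from df.h the same way the accessors for the other problems are.  The first routine scans a block forwards keeping a live set, which per the comments above requires the DF_NOTE problem to be up to date; the second keeps the local multiple-definitions set of the new DF_MD problem while walking a block forwards, roughly the way a forward-propagation style consumer would.

/* Sketch only: walk BB forwards, maintaining register liveness in a
   local bitmap.  Assumes df_note_add_problem () has been called and
   df_analyze () run, since the forward simulation consumes the
   REG_DEAD and REG_UNUSED notes rather than the use records.  */

static void
example_scan_block_forwards (basic_block bb)
{
  bitmap live = BITMAP_ALLOC (NULL);
  rtx insn;

  bitmap_copy (live, df_get_live_in (bb));
  df_simulate_initialize_forwards (bb, live);

  FOR_BB_INSNS (bb, insn)
    {
      /* ... LIVE describes the point just before INSN ...  */
      df_simulate_one_insn_forwards (bb, insn, live);
      /* ... LIVE now describes the point just after INSN ...  */
    }

  BITMAP_FREE (live);
}

/* Sketch only: maintain the DF_MD "multiple reaching definitions" set
   while walking BB forwards.  Assumes df_md_add_problem () has been
   called and df_analyze () run.  A bit set in LOCAL_MD means the
   corresponding register may be reached by more than one definition
   at the current point (restricted to live registers, as the problem
   comment above explains).  */

static void
example_scan_block_md (basic_block bb)
{
  bitmap local_md = BITMAP_ALLOC (NULL);
  rtx insn;

  /* Start from the solution at the top of BB and apply the
     artificial defs that occur at the top of the block.  */
  bitmap_copy (local_md, df_md_get_bb_info (bb->index)->in);
  df_md_simulate_artificial_defs_at_top (bb, local_md);

  FOR_BB_INSNS (bb, insn)
    {
      if (!NONDEBUG_INSN_P (insn))
	continue;

      /* ... consult LOCAL_MD here to decide whether a use of a
	 register is reached by a unique definition ...  */

      df_md_simulate_one_insn (bb, insn, local_md);
    }

  BITMAP_FREE (local_md);
}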