+/* Filter LIST (of LEN block indices) in place, keeping only those
+   indices whose bit is set in BLOCKS.  The relative order of the
+   surviving entries is unchanged.  Returns the new length.  */
+
+static unsigned
+prune_to_subcfg (int list[], unsigned len, bitmap blocks)
+{
+  unsigned i, kept = 0;
+
+  for (i = 0; i < len; i++)
+    {
+      if (bitmap_bit_p (blocks, list[i]))
+	list[kept++] = list[i];
+    }
+
+  return kept;
+}
+
+/* Alternative entry point to the analysis.  Analyze just the part of the cfg
+ graph induced by BLOCKS.
+
+ TODO I am not quite sure how to avoid code duplication with df_analyze_1
+ here, and simultaneously not make even greater chaos in it. We behave
+ slightly differently in some details, especially in handling modified
+ insns. */
+
+void
+df_analyze_subcfg (struct df *df, bitmap blocks, int flags)
+{
+ rtx insn;
+ basic_block bb;
+ struct dataflow dflow;
+ unsigned n_blocks;
+
+ /* Resolve derived flags: each chain-building problem requires the
+    underlying reaching-def/upward-use problem to be solved first.  */
+ if (flags & DF_UD_CHAIN)
+ flags |= DF_RD | DF_RD_CHAIN;
+ if (flags & DF_DU_CHAIN)
+ flags |= DF_RU;
+ if (flags & DF_RU)
+ flags |= DF_RU_CHAIN;
+ if (flags & DF_REG_INFO)
+ flags |= DF_LR;
+
+ /* First use of this df instance: allocate its structures and mark
+    every insn modified so the subsequent update scans everything.  */
+ if (!df->n_bbs)
+ {
+ df_alloc (df, max_reg_num ());
+
+ /* Mark all insns as modified. */
+
+ FOR_EACH_BB (bb)
+ {
+ FOR_BB_INSNS (bb, insn)
+ {
+ df_insn_modify (df, bb, insn);
+ }
+ }
+ }
+
+ df->flags = flags;
+
+ /* Throw away the stale per-register chains; they are rebuilt below
+    for the problems that were requested.  */
+ df_reg_def_chain_clean (df);
+ df_reg_use_chain_clean (df);
+
+ /* Rescan the refs of the insns in BLOCKS that were marked modified.  */
+ df_refs_update (df, blocks);
+
+ /* Clear the updated stuff from ``modified'' bitmaps. */
+ FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
+ {
+ if (bitmap_bit_p (df->bbs_modified, bb->index))
+ {
+ FOR_BB_INSNS (bb, insn)
+ {
+ bitmap_clear_bit (df->insns_modified, INSN_UID (insn));
+ }
+
+ bitmap_clear_bit (df->bbs_modified, bb->index);
+ }
+ });
+
+ /* Allocate the bitmaps now the total number of defs and uses are
+ known. If the number of defs or uses have changed, then
+ these bitmaps need to be reallocated. */
+ df_bitmaps_alloc (df, blocks, flags);
+
+ /* Set the LUIDs for each specified basic block. */
+ df_luids_set (df, blocks);
+
+ /* Recreate reg-def and reg-use chains from scratch so that first
+ def is at the head of the reg-def chain and the last use is at
+ the head of the reg-use chain. This is only important for
+ regs local to a basic block as it speeds up searching. */
+ if (flags & DF_RD_CHAIN)
+ {
+ df_reg_def_chain_create (df, blocks, true);
+ }
+
+ if (flags & DF_RU_CHAIN)
+ {
+ df_reg_use_chain_create (df, blocks, true);
+ }
+
+ /* The orders are computed for the full CFG and then pruned down to
+    the blocks of interest.  Arrays are sized for the whole CFG.  */
+ df->dfs_order = xmalloc (sizeof (int) * n_basic_blocks);
+ df->rc_order = xmalloc (sizeof (int) * n_basic_blocks);
+ df->rts_order = xmalloc (sizeof (int) * n_basic_blocks);
+
+ flow_depth_first_order_compute (df->dfs_order, df->rc_order);
+ flow_reverse_top_sort_order_compute (df->rts_order);
+
+ /* NOTE(review): only the dfs_order prune's length is kept; the rc/rts
+    prunes are assumed to yield the same count, since all three orders
+    enumerate the same set of blocks — confirm.  */
+ n_blocks = prune_to_subcfg (df->dfs_order, n_basic_blocks, blocks);
+ prune_to_subcfg (df->rc_order, n_basic_blocks, blocks);
+ prune_to_subcfg (df->rts_order, n_basic_blocks, blocks);
+
+ /* Indexed by block index; only the entries for blocks in BLOCKS are
+    initialized below.  This is safe only as long as iterative_dataflow
+    restricts itself to the pruned order.  */
+ dflow.in = xmalloc (sizeof (bitmap) * last_basic_block);
+ dflow.out = xmalloc (sizeof (bitmap) * last_basic_block);
+ dflow.gen = xmalloc (sizeof (bitmap) * last_basic_block);
+ dflow.kill = xmalloc (sizeof (bitmap) * last_basic_block);
+
+ if (flags & DF_RD)
+ {
+ /* Compute the sets of gens and kills for the defs of each bb. */
+ df_rd_local_compute (df, blocks);
+
+ FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
+ {
+ dflow.in[bb->index] = DF_BB_INFO (df, bb)->rd_in;
+ dflow.out[bb->index] = DF_BB_INFO (df, bb)->rd_out;
+ dflow.gen[bb->index] = DF_BB_INFO (df, bb)->rd_gen;
+ dflow.kill[bb->index] = DF_BB_INFO (df, bb)->rd_kill;
+ });
+
+ /* Reaching definitions: forward problem over the reverse-completion
+    order.  */
+ dflow.repr = SR_BITMAP;
+ dflow.dir = DF_FORWARD;
+ dflow.conf_op = DF_UNION;
+ dflow.transfun = df_rd_transfer_function;
+ dflow.n_blocks = n_blocks;
+ dflow.order = df->rc_order;
+ dflow.data = NULL;
+
+ iterative_dataflow (&dflow);
+ }
+
+ if (flags & DF_UD_CHAIN)
+ {
+ /* Create use-def chains. */
+ df_ud_chain_create (df, blocks);
+ }
+
+ if (flags & DF_RU)
+ {
+ /* Compute the sets of gens and kills for the upwards exposed
+ uses in each bb. */
+ df_ru_local_compute (df, blocks);
+
+ FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
+ {
+ dflow.in[bb->index] = DF_BB_INFO (df, bb)->ru_in;
+ dflow.out[bb->index] = DF_BB_INFO (df, bb)->ru_out;
+ dflow.gen[bb->index] = DF_BB_INFO (df, bb)->ru_gen;
+ dflow.kill[bb->index] = DF_BB_INFO (df, bb)->ru_kill;
+ });
+
+ /* Reaching uses: backward problem over the reverse-topsort order.  */
+ dflow.repr = SR_BITMAP;
+ dflow.dir = DF_BACKWARD;
+ dflow.conf_op = DF_UNION;
+ dflow.transfun = df_ru_transfer_function;
+ dflow.n_blocks = n_blocks;
+ dflow.order = df->rts_order;
+ dflow.data = NULL;
+
+ iterative_dataflow (&dflow);
+ }
+
+ if (flags & DF_DU_CHAIN)
+ {
+ /* Create def-use chains. */
+ df_du_chain_create (df, blocks);
+ }
+
+ if (flags & DF_LR)
+ {
+ /* Compute the sets of defs and uses of live variables. */
+ df_lr_local_compute (df, blocks);
+
+ /* NOTE(review): unlike the RD/RU cases above, this walks EVERY basic
+    block, not just BLOCKS, yet df_bitmaps_alloc was only given BLOCKS.
+    Entries for blocks outside BLOCKS may be stale/NULL; presumably
+    harmless because iterative_dataflow only visits the pruned order —
+    confirm, or restrict to FOR_EACH_BB_IN_BITMAP for consistency.  */
+ FOR_EACH_BB (bb)
+ {
+ dflow.in[bb->index] = DF_BB_INFO (df, bb)->lr_in;
+ dflow.out[bb->index] = DF_BB_INFO (df, bb)->lr_out;
+ dflow.gen[bb->index] = DF_BB_INFO (df, bb)->lr_use;
+ dflow.kill[bb->index] = DF_BB_INFO (df, bb)->lr_def;
+ }
+
+ /* Live registers: backward problem over the reverse-topsort order.  */
+ dflow.repr = SR_BITMAP;
+ dflow.dir = DF_BACKWARD;
+ dflow.conf_op = DF_UNION;
+ dflow.transfun = df_lr_transfer_function;
+ dflow.n_blocks = n_blocks;
+ dflow.order = df->rts_order;
+ dflow.data = NULL;
+
+ iterative_dataflow (&dflow);
+ }
+
+ if (flags & DF_REG_INFO)
+ {
+ df_reg_info_compute (df, blocks);
+ }
+
+ /* Release the scratch arrays; the per-bb bitmaps they pointed to are
+    owned by DF and stay live.  */
+ free (dflow.in);
+ free (dflow.out);
+ free (dflow.gen);
+ free (dflow.kill);
+
+ free (df->dfs_order);
+ free (df->rc_order);
+ free (df->rts_order);
+}