-/* Print the names represented by the bitmap NAMES, to the file OUT.
-
- Each set bit in NAMES is interpreted as an SSA name version number;
- the corresponding names are printed space-separated inside braces,
- followed by a newline. */
-
-static void
-dump_bitmap_of_names (FILE *out, bitmap names)
-{
- bitmap_iterator bi;
- unsigned int i;
-
- fprintf (out, " { ");
- EXECUTE_IF_SET_IN_BITMAP (names, 0, i, bi)
- {
- print_generic_expr (out, ssa_name (i), 0);
- fprintf (out, " ");
- }
- fprintf (out, "}\n");
-}
-
- /* Compute a set of representative vuse versions for each phi. This
- is so we can compute conservative kill sets in terms of all vuses
- that are killed, instead of continually walking chains.
-
- We also have to be able to kill all names associated with a phi when
- the phi dies in order to ensure we don't generate overlapping
- live ranges, which are not allowed in virtual SSA. */
-
-/* Indexed by SSA name version. vuse_names[V] is the bitmap of name
- versions represented by the virtual PHI result whose version is V,
- or NULL if it has not been computed yet. */
-static bitmap *vuse_names;
-
-/* Fill in vuse_names for every virtual PHI result, iterating to a
- fixed point since PHI arguments may themselves be PHI results. */
-static void
-compute_vuse_representatives (void)
-{
- tree phi;
- basic_block bb;
- VEC (tree, heap) *phis = NULL;
- bool changed = true;
- size_t i;
-
- /* Collect the virtual PHIs: those whose result is not a GIMPLE
- register. */
- FOR_EACH_BB (bb)
- {
- for (phi = phi_nodes (bb);
- phi;
- phi = PHI_CHAIN (phi))
- if (!is_gimple_reg (PHI_RESULT (phi)))
- VEC_safe_push (tree, heap, phis, phi);
- }
-
- /* Iterate until no representative bitmap changes. */
- while (changed)
- {
- changed = false;
-
- for (i = 0; VEC_iterate (tree, phis, i, phi); i++)
- {
- size_t ver = SSA_NAME_VERSION (PHI_RESULT (phi));
- use_operand_p usep;
- ssa_op_iter iter;
-
- /* A PHI result always represents at least itself. */
- if (vuse_names[ver] == NULL)
- {
- vuse_names[ver] = BITMAP_ALLOC (&grand_bitmap_obstack);
- bitmap_set_bit (vuse_names[ver], ver);
- }
- FOR_EACH_PHI_ARG (usep, phi, iter, SSA_OP_ALL_USES)
- {
- tree use = USE_FROM_PTR (usep);
- bitmap usebitmap = get_representative (vuse_names,
- SSA_NAME_VERSION (use));
- /* Union in whatever the argument represents; an
- argument with no representative set of its own
- represents just itself. */
- if (usebitmap != NULL)
- {
- changed |= bitmap_ior_into (vuse_names[ver],
- usebitmap);
- }
- else
- {
- changed |= !bitmap_bit_p (vuse_names[ver],
- SSA_NAME_VERSION (use));
- if (changed)
- bitmap_set_bit (vuse_names[ver],
- SSA_NAME_VERSION (use));
- }
- }
- }
- }
-
- /* Optionally dump the computed representative sets. */
- if (dump_file && (dump_flags & TDF_DETAILS))
- for (i = 0; VEC_iterate (tree, phis, i, phi); i++)
- {
- bitmap reps = get_representative (vuse_names,
- SSA_NAME_VERSION (PHI_RESULT (phi)));
- if (reps)
- {
- print_generic_expr (dump_file, PHI_RESULT (phi), 0);
- fprintf (dump_file, " represents ");
- dump_bitmap_of_names (dump_file, reps);
- }
- }
- VEC_free (tree, heap, phis);
-}
-
-/* Compute reaching vuses and antic safe loads. RVUSE computation
- is a small bit of iterative dataflow to determine what virtual uses
- reach what blocks. Because we can't generate overlapping virtual
- uses, and virtual uses *do* actually die, this ends up being faster
- in most cases than continually walking the virtual use/def chains
- to determine whether we are inside a block where a given virtual is
- still available to be used.
-
- ANTIC_SAFE_LOADS are those loads that actually occur before any kill to
- their vuses in the block, and thus, are safe at the top of the
- block.
-
- An example:
-
- <block begin>
- b = *a
- *a = 9
- <block end>
-
- b = *a is an antic safe load because it is still safe to consider it
- ANTIC at the top of the block.
-
- We currently compute a conservative approximation to
- ANTIC_SAFE_LOADS. We compute those loads that occur before *any*
- stores in the block. This is not because it is difficult to
- compute the precise answer, but because it is expensive. More
- testing is necessary to determine whether it is worth computing the
- precise answer. */
-
-static void
-compute_rvuse_and_antic_safe (void)
-{
-
- unsigned int i;
- tree phi;
- basic_block bb;
- bool changed = true;
- unsigned int *first_store_uid;
-
- /* first_store_uid[B] holds the uid of the first statement in block
- B that has virtual use or def operands (i.e. may touch memory),
- or 0 if there is none; used below to decide ANTIC-safety. */
- first_store_uid = xcalloc (n_basic_blocks, sizeof (unsigned int));
-
- compute_vuse_representatives ();
-
- /* Allocate the per-block dataflow sets. */
- FOR_ALL_BB (bb)
- {
- RVUSE_IN (bb) = BITMAP_ALLOC (&grand_bitmap_obstack);
- RVUSE_GEN (bb) = BITMAP_ALLOC (&grand_bitmap_obstack);
- RVUSE_KILL (bb) = BITMAP_ALLOC (&grand_bitmap_obstack);
- RVUSE_OUT (bb) = BITMAP_ALLOC (&grand_bitmap_obstack);
- ANTIC_SAFE_LOADS (bb) = NULL;
- }
-
- /* Mark live on entry: virtual names defined by an empty statement
- (default definitions) are available at function entry. */
- for (i = 0; i < num_ssa_names; i++)
- {
- tree name = ssa_name (i);
- if (name && !is_gimple_reg (name)
- && IS_EMPTY_STMT (SSA_NAME_DEF_STMT (name)))
- bitmap_set_bit (RVUSE_OUT (ENTRY_BLOCK_PTR),
- SSA_NAME_VERSION (name));
- }
-
- /* Compute local sets for reaching vuses.
- GEN(block) = generated in block and not locally killed.
- KILL(block) = set of vuses killed in block.
- */
-
- FOR_EACH_BB (bb)
- {
- block_stmt_iterator bsi;
- ssa_op_iter iter;
- def_operand_p defp;
- use_operand_p usep;
-
- for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
- {
- tree stmt = bsi_stmt (bsi);
-
- /* Remember the first statement with virtual operands. */
- if (first_store_uid[bb->index] == 0
- && !ZERO_SSA_OPERANDS (stmt, SSA_OP_VMAYUSE | SSA_OP_VDEF))
- {
- first_store_uid[bb->index] = stmt_ann (stmt)->uid;
- }
-
- FOR_EACH_SSA_USE_OPERAND (usep, stmt, iter, SSA_OP_VMAYUSE)
- {
- tree use = USE_FROM_PTR (usep);
- bitmap repbit = get_representative (vuse_names,
- SSA_NAME_VERSION (use));
- /* The use kills the used name and everything its
- representative bitmap covers: clear them from GEN
- and record them in KILL. */
- if (repbit != NULL)
- {
- bitmap_and_compl_into (RVUSE_GEN (bb), repbit);
- bitmap_ior_into (RVUSE_KILL (bb), repbit);
- }
- else
- {
- bitmap_set_bit (RVUSE_KILL (bb), SSA_NAME_VERSION (use));
- bitmap_clear_bit (RVUSE_GEN (bb), SSA_NAME_VERSION (use));
- }
- }
- /* Each virtual definition is generated in this block. */
- FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_VIRTUAL_DEFS)
- {
- tree def = DEF_FROM_PTR (defp);
- bitmap_set_bit (RVUSE_GEN (bb), SSA_NAME_VERSION (def));
- }
- }
- }
-
- FOR_EACH_BB (bb)
- {
- for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
- {
- if (!is_gimple_reg (PHI_RESULT (phi)))
- {
- edge e;
- edge_iterator ei;
-
- tree def = PHI_RESULT (phi);
- /* In reality, the PHI result is generated at the end of
- each predecessor block. This will make the value
- LVUSE_IN for the bb containing the PHI, which is
- correct. */
- FOR_EACH_EDGE (e, ei, bb->preds)
- bitmap_set_bit (RVUSE_GEN (e->src), SSA_NAME_VERSION (def));
- }
- }
- }
-
- /* Solve reaching vuses.
-
- RVUSE_IN[BB] = Union of RVUSE_OUT of predecessors.
- RVUSE_OUT[BB] = RVUSE_GEN[BB] U (RVUSE_IN[BB] - RVUSE_KILL[BB])
- */
-
- changed = true;
- while (changed)
- {
- int j;
- changed = false;
- /* Walk the postorder array backwards (reverse postorder) so
- that most values propagate forward in a single pass. */
- for (j = n_basic_blocks - NUM_FIXED_BLOCKS - 1; j >= 0; j--)
- {
- edge e;
- edge_iterator ei;
- bb = BASIC_BLOCK (postorder[j]);
-
- FOR_EACH_EDGE (e, ei, bb->preds)
- bitmap_ior_into (RVUSE_IN (bb), RVUSE_OUT (e->src));
-
- changed |= bitmap_ior_and_compl (RVUSE_OUT (bb),
- RVUSE_GEN (bb),
- RVUSE_IN (bb),
- RVUSE_KILL (bb));
- }
- }
-
- /* Dump the solution when details are requested. */
- if (dump_file && (dump_flags & TDF_DETAILS))
- {
- FOR_ALL_BB (bb)
- {
- fprintf (dump_file, "RVUSE_IN (%d) =", bb->index);
- dump_bitmap_of_names (dump_file, RVUSE_IN (bb));
-
- fprintf (dump_file, "RVUSE_KILL (%d) =", bb->index);
- dump_bitmap_of_names (dump_file, RVUSE_KILL (bb));
-
- fprintf (dump_file, "RVUSE_GEN (%d) =", bb->index);
- dump_bitmap_of_names (dump_file, RVUSE_GEN (bb));
-
- fprintf (dump_file, "RVUSE_OUT (%d) =", bb->index);
- dump_bitmap_of_names (dump_file, RVUSE_OUT (bb));
- }
- }
-
- /* Compute the conservative ANTIC_SAFE_LOADS approximation: a
- reference expression whose leader is defined in this block
- before the first memory-touching statement (or by a PHI) is
- still safe at the top of the block. */
- FOR_EACH_BB (bb)
- {
- bitmap_iterator bi;
-
- /* Nothing is killed here, so no load needs to be recorded. */
- if (bitmap_empty_p (RVUSE_KILL (bb)))
- continue;
-
- FOR_EACH_EXPR_ID_IN_SET (EXP_GEN (bb), i, bi)
- {
- tree expr = expression_for_id (i);
- if (REFERENCE_CLASS_P (expr))
- {
- tree vh = get_value_handle (expr);
- tree maybe = bitmap_find_leader (AVAIL_OUT (bb), vh);
-
- if (maybe)
- {
- tree def = SSA_NAME_DEF_STMT (maybe);
-
- /* Only loads computed in this block are candidates. */
- if (bb_for_stmt (def) != bb)
- continue;
-
- /* PHIs conceptually occur before all statements;
- otherwise the defining statement must precede the
- first memory-touching statement in the block. */
- if (TREE_CODE (def) == PHI_NODE
- || stmt_ann (def)->uid < first_store_uid[bb->index])
- {
- if (ANTIC_SAFE_LOADS (bb) == NULL)
- ANTIC_SAFE_LOADS (bb) = bitmap_set_new ();
- bitmap_value_insert_into_set (ANTIC_SAFE_LOADS (bb),
- expr);
- }
- }
- }
- }
- }
- free (first_store_uid);
-}
-