+/* Perform insertion of partially redundant values.
+ For BLOCK, do the following:
+ 1. Propagate the NEW_SETS of the dominator into the current block.
+ If the block has multiple predecessors,
+ 2a. Iterate over the ANTIC expressions for the block to see if
+ any of them are partially redundant.
+ 2b. If so, insert them into the necessary predecessors to make
+ the expression fully redundant.
+ 2c. Insert a new PHI merging the values of the predecessors.
+ 2d. Insert the new PHI, and the new expressions, into the
+ NEW_SETS set.
+ 3. Recursively call ourselves on the dominator children of BLOCK.
+
+ Steps 1, 2a, and 3 are done by insert_aux. 2b, 2c and 2d are done by
+ do_regular_insertion and do_partial_partial_insertion.
+
+*/
+
+/* Perform insertion (steps 2b-2d above) for the fully anticipatable
+ expressions of BLOCK (ANTIC_IN).  DOM is BLOCK's immediate dominator;
+ its AVAIL_OUT set tells us which values are already fully redundant.
+ Return true if anything new was inserted.  */
+
+static bool
+do_regular_insertion (basic_block block, basic_block dom)
+{
+ bool new_stuff = false;
+ VEC (tree, heap) *exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
+ tree expr;
+ int i;
+
+ for (i = 0; VEC_iterate (tree, exprs, i, expr); i++)
+ {
+ /* Only non-aggregate expressions that we know how to re-create
+ in a predecessor are candidates for insertion.  */
+ if (can_PRE_operation (expr) && !AGGREGATE_TYPE_P (TREE_TYPE (expr)))
+ {
+ tree *avail;
+ tree val;
+ bool by_some = false;
+ bool cant_insert = false;
+ bool all_same = true;
+ tree first_s = NULL;
+ edge pred;
+ basic_block bprime;
+ tree eprime = NULL_TREE;
+ edge_iterator ei;
+
+ val = get_value_handle (expr);
+ /* Skip values for which we already created a PHI in this
+ block on a previous iteration/expression.  */
+ if (bitmap_set_contains_value (PHI_GEN (block), val))
+ continue;
+ /* A value available out of the dominator is already fully
+ redundant here; nothing to insert.  */
+ if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
+ {
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file, "Found fully redundant value\n");
+ continue;
+ }
+
+ /* avail[B->index] records, for each predecessor B, either the
+ leader for the translated value (when it is available out of
+ B) or the translated expression itself (when it must be
+ inserted into B).  Zero-initialized by XCNEWVEC.  */
+ avail = XCNEWVEC (tree, last_basic_block);
+ FOR_EACH_EDGE (pred, ei, block->preds)
+ {
+ tree vprime;
+ tree edoubleprime;
+
+ /* This can happen in the very weird case
+ that our fake infinite loop edges have caused a
+ critical edge to appear. */
+ if (EDGE_CRITICAL_P (pred))
+ {
+ cant_insert = true;
+ break;
+ }
+ bprime = pred->src;
+ /* Translate EXPR through the PHI nodes of BLOCK into the
+ name space of predecessor BPRIME.  */
+ eprime = phi_translate (expr, ANTIC_IN (block), NULL,
+ bprime, block);
+
+ /* eprime will generally only be NULL if the
+ value of the expression, translated
+ through the PHI for this predecessor, is
+ undefined. If that is the case, we can't
+ make the expression fully redundant,
+ because its value is undefined along a
+ predecessor path. We can thus break out
+ early because it doesn't matter what the
+ rest of the results are. */
+ if (eprime == NULL)
+ {
+ cant_insert = true;
+ break;
+ }
+
+ eprime = fully_constant_expression (eprime);
+ vprime = get_value_handle (eprime);
+ gcc_assert (vprime);
+ /* Look for an existing computation of this value at the
+ end of the predecessor.  */
+ edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
+ vprime);
+ if (edoubleprime == NULL)
+ {
+ /* Not available in this predecessor: remember the
+ expression we would have to insert there.  */
+ avail[bprime->index] = eprime;
+ all_same = false;
+ }
+ else
+ {
+ avail[bprime->index] = edoubleprime;
+ by_some = true;
+ if (first_s == NULL)
+ first_s = edoubleprime;
+ else if (!operand_equal_p (first_s, edoubleprime,
+ 0))
+ all_same = false;
+ }
+ }
+ /* If we can insert it, it's not the same value
+ already existing along every predecessor, and
+ it's defined by some predecessor, it is
+ partially redundant. */
+ if (!cant_insert && !all_same && by_some)
+ {
+ if (insert_into_preds_of_block (block, get_expression_id (expr),
+ avail))
+ new_stuff = true;
+ }
+ /* If all edges produce the same value and that value is
+ an invariant, then the PHI has the same value on all
+ edges. Note this. */
+ else if (!cant_insert && all_same && eprime
+ && is_gimple_min_invariant (eprime)
+ && !is_gimple_min_invariant (val))
+ {
+ unsigned int j;
+ bitmap_iterator bi;
+
+ /* Record the constant value for every SSA_NAME carrying
+ this value handle, so later passes see the invariant.  */
+ bitmap_set_t exprset = VALUE_HANDLE_EXPR_SET (val);
+ FOR_EACH_EXPR_ID_IN_SET (exprset, j, bi)
+ {
+ /* NOTE: this EXPR intentionally shadows the outer
+ loop variable.  */
+ tree expr = expression_for_id (j);
+ if (TREE_CODE (expr) == SSA_NAME)
+ {
+ vn_add (expr, eprime);
+ pre_stats.constified++;
+ }
+ }
+ }
+ /* Per-expression availability array; freed on every path
+ that allocated it.  */
+ free (avail);
+ }
+ }
+
+ VEC_free (tree, heap, exprs);
+ return new_stuff;
+}
+
+
+/* Perform insertion for partially anticipatable expressions. There
+ is only one case we will perform insertion for these. This case is
+ if the expression is partially anticipatable, and fully available.
+ In this case, we know that putting it earlier will enable us to
+ remove the later computation. */
+
+
+/* Perform insertion (steps 2b-2d above) for the partially anticipatable
+ expressions of BLOCK (PA_IN).  DOM is BLOCK's immediate dominator.
+ Unlike do_regular_insertion, insertion happens only when the value is
+ available out of EVERY predecessor (BY_ALL), since that is the one
+ profitable case described in the comment above.  Return true if
+ anything new was inserted.  */
+
+static bool
+do_partial_partial_insertion (basic_block block, basic_block dom)
+{
+ bool new_stuff = false;
+ VEC (tree, heap) *exprs = sorted_array_from_bitmap_set (PA_IN (block));
+ tree expr;
+ int i;
+
+ for (i = 0; VEC_iterate (tree, exprs, i, expr); i++)
+ {
+ /* Only non-aggregate expressions that we know how to re-create
+ in a predecessor are candidates for insertion.  */
+ if (can_PRE_operation (expr) && !AGGREGATE_TYPE_P (TREE_TYPE (expr)))
+ {
+ tree *avail;
+ tree val;
+ bool by_all = true;
+ bool cant_insert = false;
+ edge pred;
+ basic_block bprime;
+ tree eprime = NULL_TREE;
+ edge_iterator ei;
+
+ val = get_value_handle (expr);
+ /* Skip values for which we already created a PHI in this
+ block, and values that are already fully redundant because
+ they are available out of the dominator.  */
+ if (bitmap_set_contains_value (PHI_GEN (block), val))
+ continue;
+ if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
+ continue;
+
+ /* avail[B->index] records the leader for the translated value
+ in each predecessor B.  Zero-initialized by XCNEWVEC.  */
+ avail = XCNEWVEC (tree, last_basic_block);
+ FOR_EACH_EDGE (pred, ei, block->preds)
+ {
+ tree vprime;
+ tree edoubleprime;
+
+ /* This can happen in the very weird case
+ that our fake infinite loop edges have caused a
+ critical edge to appear. */
+ if (EDGE_CRITICAL_P (pred))
+ {
+ cant_insert = true;
+ break;
+ }
+ bprime = pred->src;
+ /* Translate EXPR through the PHI nodes of BLOCK into the
+ name space of predecessor BPRIME; here PA_IN is also
+ consulted, unlike in do_regular_insertion.  */
+ eprime = phi_translate (expr, ANTIC_IN (block),
+ PA_IN (block),
+ bprime, block);
+
+ /* eprime will generally only be NULL if the
+ value of the expression, translated
+ through the PHI for this predecessor, is
+ undefined. If that is the case, we can't
+ make the expression fully redundant,
+ because its value is undefined along a
+ predecessor path. We can thus break out
+ early because it doesn't matter what the
+ rest of the results are. */
+ if (eprime == NULL)
+ {
+ cant_insert = true;
+ break;
+ }
+
+ eprime = fully_constant_expression (eprime);
+ vprime = get_value_handle (eprime);
+ gcc_assert (vprime);
+ edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
+ vprime);
+ /* For partial-partial insertion the value must be fully
+ available: a single unavailable predecessor disqualifies
+ the expression.  */
+ if (edoubleprime == NULL)
+ {
+ by_all = false;
+ break;
+ }
+ else
+ avail[bprime->index] = edoubleprime;
+
+ }
+
+ /* If the expression could be translated along every edge and
+ its value is available out of every predecessor, hoisting a
+ PHI for it here lets us delete the later (partially
+ anticipated) computation.  */
+ if (!cant_insert && by_all)
+ {
+ pre_stats.pa_insert++;
+ if (insert_into_preds_of_block (block, get_expression_id (expr),
+ avail))
+ new_stuff = true;
+ }
+ free (avail);
+ }
+ }
+
+ VEC_free (tree, heap, exprs);
+ return new_stuff;
+}