+ for (phi = phi_nodes (bb);
+ phi;
+ phi = PHI_CHAIN (phi))
+ if (!is_gimple_reg (PHI_RESULT (phi)))
+ VEC_safe_push (tree, heap, phis, phi);
+ }
+
+ while (changed)
+ {
+ changed = false;
+
+ for (i = 0; VEC_iterate (tree, phis, i, phi); i++)
+ {
+ size_t ver = SSA_NAME_VERSION (PHI_RESULT (phi));
+ use_operand_p usep;
+ ssa_op_iter iter;
+
+ if (vuse_names[ver] == NULL)
+ {
+ vuse_names[ver] = BITMAP_ALLOC (&grand_bitmap_obstack);
+ bitmap_set_bit (vuse_names[ver], ver);
+ }
+ FOR_EACH_PHI_ARG (usep, phi, iter, SSA_OP_ALL_USES)
+ {
+ tree use = USE_FROM_PTR (usep);
+ bitmap usebitmap = get_representative (vuse_names,
+ SSA_NAME_VERSION (use));
+ if (usebitmap != NULL)
+ {
+ changed |= bitmap_ior_into (vuse_names[ver],
+ usebitmap);
+ }
+ else if (!bitmap_bit_p (vuse_names[ver],
+ SSA_NAME_VERSION (use)))
+ {
+ changed = true;
+ bitmap_set_bit (vuse_names[ver],
+ SSA_NAME_VERSION (use));
+ }
+ }
+ }
+ }
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ for (i = 0; VEC_iterate (tree, phis, i, phi); i++)
+ {
+ bitmap reps = get_representative (vuse_names,
+ SSA_NAME_VERSION (PHI_RESULT (phi)));
+ if (reps)
+ {
+ print_generic_expr (dump_file, PHI_RESULT (phi), 0);
+ fprintf (dump_file, " represents ");
+ dump_bitmap_of_names (dump_file, reps);
+ }
+ }
+ VEC_free (tree, heap, phis);
+}
+
+/* Compute reaching vuses and antic safe loads. RVUSE computation is
+ a small bit of iterative dataflow to determine which virtual uses
+ reach which blocks. Because we can't generate overlapping virtual
+ uses, and virtual uses *do* actually die, this ends up being faster
+ in most cases than continually walking the virtual use/def chains
+ to determine whether we are inside a block where a given virtual is
+ still available to be used.
+
+ ANTIC_SAFE_LOADS are those loads that actually occur before any kill to
+ their vuses in the block, and thus are safe at the top of the
+ block.
+
+ An example:
+
+ <block begin>
+ b = *a
+ *a = 9
+ <block end>
+
+ b = *a is an antic safe load because it is still safe to consider it
+ ANTIC at the top of the block.
+
+ We currently compute a conservative approximation to
+ ANTIC_SAFE_LOADS. We compute those loads that occur before *any*
+ stores in the block. This is not because it is difficult to
+ compute the precise answer, but because it is expensive. More
+ testing is necessary to determine whether it is worth computing the
+ precise answer.
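+
+ As an illustration of where the two differ (SSA and alias details
+ elided):
+
+ <block begin>
+ b = *a
+ *c = 9
+ d = *a
+ <block end>
+
+ The precise computation could still treat d = *a as ANTIC safe when
+ the store through c does not kill the vuse of *a, but the
+ conservative approximation will not, because that load occurs after
+ a store.  */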
+
+static void
+compute_rvuse_and_antic_safe (void)
+{
+ size_t i;
+ tree phi;
+ basic_block bb;
+ int *postorder;
+ bool changed = true;
+ unsigned int *first_store_uid;
+
+ first_store_uid = XCNEWVEC (unsigned int, n_basic_blocks);
+
+ compute_vuse_representatives ();
+
+ FOR_ALL_BB (bb)
+ {
+ RVUSE_IN (bb) = BITMAP_ALLOC (&grand_bitmap_obstack);
+ RVUSE_GEN (bb) = BITMAP_ALLOC (&grand_bitmap_obstack);
+ RVUSE_KILL (bb) = BITMAP_ALLOC (&grand_bitmap_obstack);
+ RVUSE_OUT (bb) = BITMAP_ALLOC (&grand_bitmap_obstack);
+ ANTIC_SAFE_LOADS (bb) = NULL;
+ }
+
+ /* Mark live on entry.  */
+ for (i = 0; i < num_ssa_names; i++)
+ {
+ tree name = ssa_name (i);
+ if (name && !is_gimple_reg (name)
+ && IS_EMPTY_STMT (SSA_NAME_DEF_STMT (name)))
+ bitmap_set_bit (RVUSE_OUT (ENTRY_BLOCK_PTR),
+ SSA_NAME_VERSION (name));
+ }
+
+ /* Compute local sets for reaching vuses.
+ GEN (block) = set of vuses generated in block and not locally killed.
+ KILL (block) = set of vuses killed in block.  */
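+
+ /* For example (SSA names purely illustrative), given the statements
+
+ # VUSE <a_2>
+ x = *p
+ # a_3 = V_MAY_DEF <a_2>
+ *p = y
+
+ the V_MAY_DEF kills a_2, so a_2 is added to KILL and removed from
+ GEN, and then a_3 is added to GEN.  */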
+
+ FOR_EACH_BB (bb)
+ {
+ block_stmt_iterator bsi;
+ ssa_op_iter iter;
+ def_operand_p defp;
+ use_operand_p usep;
+
+ for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+ {
+ tree stmt = bsi_stmt (bsi);
+
+ /* Record the UID of the first statement in the block with
+ any virtual operands; loads at or after this point are not
+ considered ANTIC safe. */
+ if (first_store_uid[bb->index] == 0
+ && !ZERO_SSA_OPERANDS (stmt, SSA_OP_VMAYUSE | SSA_OP_VMAYDEF
+ | SSA_OP_VMUSTDEF | SSA_OP_VMUSTKILL))
+ {
+ first_store_uid[bb->index] = stmt_ann (stmt)->uid;
+ }
+
+ FOR_EACH_SSA_USE_OPERAND (usep, stmt, iter, SSA_OP_VIRTUAL_KILLS
+ | SSA_OP_VMAYUSE)
+ {
+ tree use = USE_FROM_PTR (usep);
+ bitmap repbit = get_representative (vuse_names,
+ SSA_NAME_VERSION (use));
+ if (repbit != NULL)
+ {
+ bitmap_and_compl_into (RVUSE_GEN (bb), repbit);
+ bitmap_ior_into (RVUSE_KILL (bb), repbit);
+ }
+ else
+ {
+ bitmap_set_bit (RVUSE_KILL (bb), SSA_NAME_VERSION (use));
+ bitmap_clear_bit (RVUSE_GEN (bb), SSA_NAME_VERSION (use));
+ }
+ }
+ FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_VIRTUAL_DEFS)
+ {
+ tree def = DEF_FROM_PTR (defp);
+ bitmap_set_bit (RVUSE_GEN (bb), SSA_NAME_VERSION (def));
+ }
+ }
+ }
+
+ FOR_EACH_BB (bb)
+ {
+ for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+ {
+ if (!is_gimple_reg (PHI_RESULT (phi)))
+ {
+ edge e;
+ edge_iterator ei;
+
+ tree def = PHI_RESULT (phi);
+ /* In reality, the PHI result is generated at the end of
+ each predecessor block. This will make the value
+ RVUSE_IN for the bb containing the PHI, which is
+ correct.  */
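+ /* E.g. for # a_3 = PHI <a_1 (2), a_2 (3)> (names and block
+ numbers purely illustrative), a_3 ends up in RVUSE_GEN of
+ blocks 2 and 3.  */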
+ FOR_EACH_EDGE (e, ei, bb->preds)
+ bitmap_set_bit (RVUSE_GEN (e->src), SSA_NAME_VERSION (def));
+ }
+ }
+ }
+
+ /* Solve reaching vuses.
+
+ RVUSE_IN[BB] = Union of RVUSE_OUT of predecessors.
+ RVUSE_OUT[BB] = RVUSE_GEN[BB] U (RVUSE_IN[BB] - RVUSE_KILL[BB])
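+
+ The postorder array below actually holds a reverse post-order
+ walk, so most predecessors are processed before their successors,
+ and the fixpoint is typically reached in few iterations.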
+ */
+ postorder = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
+ pre_and_rev_post_order_compute (NULL, postorder, false);
+
+ changed = true;
+ while (changed)
+ {
+ int j;
+ changed = false;
+ for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
+ {
+ edge e;
+ edge_iterator ei;
+ bb = BASIC_BLOCK (postorder[j]);
+
+ FOR_EACH_EDGE (e, ei, bb->preds)
+ bitmap_ior_into (RVUSE_IN (bb), RVUSE_OUT (e->src));
+
+ changed |= bitmap_ior_and_compl (RVUSE_OUT (bb),
+ RVUSE_GEN (bb),
+ RVUSE_IN (bb),
+ RVUSE_KILL (bb));
+ }
+ }
+ free (postorder);
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ FOR_ALL_BB (bb)
+ {
+ fprintf (dump_file, "RVUSE_IN (%d) =", bb->index);
+ dump_bitmap_of_names (dump_file, RVUSE_IN (bb));
+
+ fprintf (dump_file, "RVUSE_KILL (%d) =", bb->index);
+ dump_bitmap_of_names (dump_file, RVUSE_KILL (bb));
+
+ fprintf (dump_file, "RVUSE_GEN (%d) =", bb->index);
+ dump_bitmap_of_names (dump_file, RVUSE_GEN (bb));
+
+ fprintf (dump_file, "RVUSE_OUT (%d) =", bb->index);
+ dump_bitmap_of_names (dump_file, RVUSE_OUT (bb));
+ }
+ }
+
+ FOR_EACH_BB (bb)
+ {
+ value_set_node_t node;
+ if (bitmap_empty_p (RVUSE_KILL (bb)))
+ continue;
+
+ for (node = EXP_GEN (bb)->head; node; node = node->next)
+ {
+ if (REFERENCE_CLASS_P (node->expr))
+ {
+ tree vh = get_value_handle (node->expr);
+ tree maybe = bitmap_find_leader (AVAIL_OUT (bb), vh);
+
+ if (maybe)
+ {
+ tree def = SSA_NAME_DEF_STMT (maybe);
+
+ if (bb_for_stmt (def) != bb)
+ continue;
+
+ /* The load is ANTIC safe if its defining statement is a
+ PHI or occurs before the first statement with virtual
+ operands in the block. */
+ if (TREE_CODE (def) == PHI_NODE
+ || stmt_ann (def)->uid < first_store_uid[bb->index])
+ {
+ if (ANTIC_SAFE_LOADS (bb) == NULL)
+ ANTIC_SAFE_LOADS (bb) = set_new (true);
+ value_insert_into_set (ANTIC_SAFE_LOADS (bb),
+ node->expr);
+ }
+ }
+ }
+ }
+ }
+ free (first_store_uid);
+}
+
+/* Return true if we can value number the call in STMT. This is true
+ if we have a pure or constant call. */
+
+static bool
+can_value_number_call (tree stmt)
+{
+ tree call = get_call_expr_in (stmt);
+
+ if (call_expr_flags (call) & (ECF_PURE | ECF_CONST))
+ return true;
+ return false;
+}
+
+/* Return true if OP is a tree which we can perform value numbering
+ on. */
+
+static bool
+can_value_number_operation (tree op)
+{
+ return UNARY_CLASS_P (op)
+ || BINARY_CLASS_P (op)
+ || COMPARISON_CLASS_P (op)
+ || REFERENCE_CLASS_P (op)
+ || (TREE_CODE (op) == CALL_EXPR
+ && can_value_number_call (op));
+}
+
+
+/* Return true if OP is a tree on which we can perform PRE. This may
+ not match the operations we can value number, but in a perfect
+ world it would.  */
+
+static bool
+can_PRE_operation (tree op)
+{
+ return UNARY_CLASS_P (op)
+ || BINARY_CLASS_P (op)
+ || COMPARISON_CLASS_P (op)
+ || TREE_CODE (op) == INDIRECT_REF
+ || TREE_CODE (op) == COMPONENT_REF
+ || TREE_CODE (op) == CALL_EXPR
+ || TREE_CODE (op) == ARRAY_REF;
+}
+
+
+/* Inserted expressions are placed onto this worklist, which is used
+ for performing quick dead code elimination of insertions we made
+ that didn't turn out to be necessary. */
+static VEC(tree,heap) *inserted_exprs;
+
+/* Pool allocated fake store expressions are placed onto this
+ worklist, which, after performing dead code elimination, is walked
+ to see which expressions need to be put into GC'able memory.  */
+static VEC(tree, heap) *need_creation;
+
+/* For COMPONENT_REFs and ARRAY_REFs, we can't have any intermediates for the
+ COMPONENT_REF, INDIRECT_REF, or ARRAY_REF portion, because we'd end up
+ trying to rename aggregates into SSA form directly, which is a no-no.
+
+ Thus, this routine doesn't create temporaries, it just builds a
+ single access expression for the array, calling
+ find_or_generate_expression to build the innermost pieces.
+
+ This function is a subroutine of create_expression_by_pieces, and
+ should not be called on its own unless you really know what you
+ are doing.
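+
+ As a sketch of the intent (names illustrative, not actual dump
+ output): for a value handle whose representative expression is
+ a.x[i_1], we build the ARRAY_REF directly on top of the generated
+ COMPONENT_REF for a.x, and only call find_or_generate_expression
+ for scalar pieces such as i_1.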
+*/
+static tree
+create_component_ref_by_pieces (basic_block block, tree expr, tree stmts)
+{
+ tree genop = expr;
+ tree folded;
+
+ if (TREE_CODE (genop) == VALUE_HANDLE)
+ {
+ tree found = bitmap_find_leader (AVAIL_OUT (block), expr);
+ if (found)
+ return found;
+ }
+
+ if (TREE_CODE (genop) == VALUE_HANDLE)
+ genop = VALUE_HANDLE_EXPR_SET (expr)->head->expr;
+
+ switch (TREE_CODE (genop))
+ {
+ case ARRAY_REF:
+ {
+ tree op0;
+ tree op1, op2, op3;
+ op0 = create_component_ref_by_pieces (block,
+ TREE_OPERAND (genop, 0),
+ stmts);
+ op1 = TREE_OPERAND (genop, 1);
+ if (TREE_CODE (op1) == VALUE_HANDLE)
+ op1 = find_or_generate_expression (block, op1, stmts);
+ op2 = TREE_OPERAND (genop, 2);
+ if (op2 && TREE_CODE (op2) == VALUE_HANDLE)
+ op2 = find_or_generate_expression (block, op2, stmts);
+ op3 = TREE_OPERAND (genop, 3);
+ if (op3 && TREE_CODE (op3) == VALUE_HANDLE)
+ op3 = find_or_generate_expression (block, op3, stmts);
+ folded = build4 (ARRAY_REF, TREE_TYPE (genop), op0, op1,
+ op2, op3);
+ return folded;
+ }
+ case COMPONENT_REF:
+ {
+ tree op0;
+ tree op1;
+ op0 = create_component_ref_by_pieces (block,
+ TREE_OPERAND (genop, 0),
+ stmts);
+ op1 = VALUE_HANDLE_EXPR_SET (TREE_OPERAND (genop, 1))->head->expr;
+ folded = fold_build3 (COMPONENT_REF, TREE_TYPE (genop), op0, op1,
+ NULL_TREE);
+ return folded;
+ }
+ case INDIRECT_REF:
+ {
+ tree op1 = TREE_OPERAND (genop, 0);
+ tree genop1 = find_or_generate_expression (block, op1, stmts);
+
+ folded = fold_build1 (TREE_CODE (genop), TREE_TYPE (genop),
+ genop1);
+ return folded;
+ }
+ case VAR_DECL:
+ case PARM_DECL:
+ case RESULT_DECL:
+ case SSA_NAME:
+ case STRING_CST:
+ return genop;
+ default:
+ gcc_unreachable ();
+ }
+
+ return NULL_TREE;
+}
+
+/* Find a leader for an expression, or generate one using
+ create_expression_by_pieces if it's ANTIC but
+ complex.
+ BLOCK is the basic_block we are looking for leaders in.
+ EXPR is the expression to find a leader or generate for.
+ STMTS is the statement list to put the inserted expressions on.
+ Returns the SSA_NAME of the LHS of the generated expression or the
+ leader. */
+
+static tree
+find_or_generate_expression (basic_block block, tree expr, tree stmts)
+{
+ tree genop = bitmap_find_leader (AVAIL_OUT (block), expr);
+
+ /* If it's still NULL, it must be a complex expression, so generate
+ it recursively. */
+ if (genop == NULL)
+ {
+ genop = VALUE_HANDLE_EXPR_SET (expr)->head->expr;
+
+ gcc_assert (can_PRE_operation (genop));
+ genop = create_expression_by_pieces (block, genop, stmts);
+ }
+ return genop;
+}
+
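+/* NECESSARY marks statements that turn out to be necessary, so the
+ quick dead code elimination of inserted expressions (see
+ inserted_exprs above) knows to keep them. It reuses the
+ statement's asm_written_flag bit.  */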
+#define NECESSARY(stmt) ((stmt)->common.asm_written_flag)
+
+/* Create an expression in pieces, so that we can handle very complex
+ expressions that may be ANTIC, but not necessarily GIMPLE.
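+ (For instance, the value form of an expression like a + *b is not
+ itself valid GIMPLE; the load *b must first be generated into a
+ temporary.)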
+ BLOCK is the basic block the expression will be inserted into,
+ EXPR is the expression to insert (in value form)
+ STMTS is a statement list to append the necessary insertions into.
+
+ This function will die if we hit some value that shouldn't be
+ ANTIC but is (i.e., there is no leader for it or for its components).
+ This function may also generate expressions that are themselves
+ partially or fully redundant. Those that are will be either made
+ fully redundant during the next iteration of insert (for partially
+ redundant ones), or eliminated by eliminate (for fully redundant
+ ones). */
+
+static tree
+create_expression_by_pieces (basic_block block, tree expr, tree stmts)
+{
+ tree temp, name;
+ tree folded, forced_stmts, newexpr;
+ tree v;
+ tree_stmt_iterator tsi;
+
+ switch (TREE_CODE_CLASS (TREE_CODE (expr)))
+ {
+ case tcc_expression:
+ {
+ tree op0, op2;
+ tree arglist;
+ tree genop0, genop2;
+ tree genarglist;
+ tree walker, genwalker;
+
+ gcc_assert (TREE_CODE (expr) == CALL_EXPR);
+ genop2 = NULL;
+
+ op0 = TREE_OPERAND (expr, 0);
+ arglist = TREE_OPERAND (expr, 1);
+ op2 = TREE_OPERAND (expr, 2);
+
+ genop0 = find_or_generate_expression (block, op0, stmts);
+ genarglist = copy_list (arglist);
+ for (walker = arglist, genwalker = genarglist;
+ genwalker && walker;
+ genwalker = TREE_CHAIN (genwalker), walker = TREE_CHAIN (walker))
+ {
+ TREE_VALUE (genwalker)
+ = find_or_generate_expression (block, TREE_VALUE (walker),
+ stmts);
+ }
+
+ if (op2)
+ genop2 = find_or_generate_expression (block, op2, stmts);
+ folded = fold_build3 (TREE_CODE (expr), TREE_TYPE (expr),
+ genop0, genarglist, genop2);