+ gimple oneh;
+ gimple_stmt_iterator gsi;
+
+ gsi = gsi_start (gimple_try_cleanup (one));
+ if (!gsi_one_before_end_p (gsi))
+ return;
+
+ oneh = gsi_stmt (gsi);
+ if (gimple_code (oneh) != GIMPLE_TRY
+ || gimple_try_kind (oneh) != GIMPLE_TRY_CATCH)
+ return;
+
+ if (same_handler_p (gimple_try_cleanup (oneh), gimple_try_cleanup (two)))
+ {
+ gimple_seq seq = gimple_try_eval (oneh);
+
+ gimple_try_set_cleanup (one, seq);
+ gimple_try_set_kind (one, GIMPLE_TRY_CATCH);
+ seq = copy_gimple_seq_and_replace_locals (seq);
+ gimple_seq_add_seq (&seq, gimple_try_eval (two));
+ gimple_try_set_eval (two, seq);
+ }
+}
+
+/* Perform EH refactoring optimizations that are simpler to do when code
+ flow has been lowered but EH structures haven't. */
+
+static void
+refactor_eh_r (gimple_seq seq)
+{
+  gimple_stmt_iterator gsi = gsi_start (seq);
+  gimple prev = NULL;
+
+  /* Slide a two-statement window (PREV, CUR) across SEQ.  CUR is NULL
+     once the iterator runs off the end; we still make one final trip
+     through the loop so PREV's nested sequences get processed.  */
+  for (;;)
+    {
+      gimple cur = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
+
+      /* Two adjacent try/finally statements are candidates for merging
+         their duplicated cleanup sequences.  */
+      if (prev != NULL
+          && cur != NULL
+          && gimple_code (prev) == GIMPLE_TRY
+          && gimple_code (cur) == GIMPLE_TRY
+          && gimple_try_kind (prev) == GIMPLE_TRY_FINALLY
+          && gimple_try_kind (cur) == GIMPLE_TRY_FINALLY)
+        optimize_double_finally (prev, cur);
+
+      /* Recurse into the nested sequences of the statement leaving the
+         window, after any merging above has rewritten them.  */
+      if (prev != NULL)
+        switch (gimple_code (prev))
+          {
+          case GIMPLE_TRY:
+            refactor_eh_r (gimple_try_eval (prev));
+            refactor_eh_r (gimple_try_cleanup (prev));
+            break;
+          case GIMPLE_CATCH:
+            refactor_eh_r (gimple_catch_handler (prev));
+            break;
+          case GIMPLE_EH_FILTER:
+            refactor_eh_r (gimple_eh_filter_failure (prev));
+            break;
+          default:
+            break;
+          }
+
+      if (cur == NULL)
+        break;
+      prev = cur;
+      gsi_next (&gsi);
+    }
+}
+
+/* Gate-less pass entry point: refactor EH constructs throughout the
+   current function's pre-CFG body.  Always returns 0 (no TODO flags).  */
+
+static unsigned
+refactor_eh (void)
+{
+  gimple_seq body = gimple_body (current_function_decl);
+
+  refactor_eh_r (body);
+  return 0;
+}
+
+/* Pass descriptor for the "ehopt" pass.  Requires lowered GIMPLE control
+   flow (PROP_gimple_lcf); dumps the function when finished.  */
+struct gimple_opt_pass pass_refactor_eh =
+{
+ {
+  GIMPLE_PASS,
+  "ehopt",				/* name */
+  NULL,					/* gate */
+  refactor_eh,				/* execute */
+  NULL,					/* sub */
+  NULL,					/* next */
+  0,					/* static_pass_number */
+  TV_TREE_EH,				/* tv_id */
+  PROP_gimple_lcf,			/* properties_required */
+  0,					/* properties_provided */
+  0,					/* properties_destroyed */
+  0,					/* todo_flags_start */
+  TODO_dump_func			/* todo_flags_finish */
+ }
+};
+
+/* Walk statements, see what regions are really referenced and remove
+   unreachable ones.  */
+
+static void
+tree_remove_unreachable_handlers (void)
+{
+  sbitmap reachable, contains_stmt;
+  VEC(int,heap) * label_to_region;
+  basic_block bb;
+
+  label_to_region = label_to_region_map ();
+  reachable = sbitmap_alloc (num_eh_regions ());
+  sbitmap_zero (reachable);
+  contains_stmt = sbitmap_alloc (num_eh_regions ());
+  sbitmap_zero (contains_stmt);
+
+  FOR_EACH_BB (bb)
+    {
+      gimple_stmt_iterator gsi;
+      int region;
+      bool has_eh_preds = bb_has_eh_pred (bb);
+
+      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+	{
+	  gimple stmt = gsi_stmt (gsi);
+
+	  /* A label in a block entered by EH edges marks every region
+	     sharing that label as reachable.  LAB_REGION is deliberately
+	     distinct from REGION below to avoid shadowing.  */
+	  if (gimple_code (stmt) == GIMPLE_LABEL && has_eh_preds)
+	    {
+	      int uid = LABEL_DECL_UID (gimple_label_label (stmt));
+	      int lab_region;
+
+	      for (lab_region = VEC_index (int, label_to_region, uid);
+		   lab_region;
+		   lab_region = get_next_region_sharing_label (lab_region))
+		SET_BIT (reachable, lab_region);
+	    }
+	  /* A RESX keeps its named region reachable.  */
+	  if (gimple_code (stmt) == GIMPLE_RESX)
+	    SET_BIT (reachable,
+		     VEC_index (eh_region, cfun->eh->region_array,
+				gimple_resx_region (stmt))->region_number);
+	  /* Record which regions still contain statements at all.  */
+	  if ((region = lookup_stmt_eh_region (stmt)) >= 0)
+	    SET_BIT (contains_stmt, region);
+	}
+    }
+
+  if (dump_file)
+    {
+      fprintf (dump_file, "Before removal of unreachable regions:\n");
+      dump_eh_tree (dump_file, cfun);
+      fprintf (dump_file, "Reachable regions: ");
+      dump_sbitmap_file (dump_file, reachable);
+      fprintf (dump_file, "Regions containing insns: ");
+      dump_sbitmap_file (dump_file, contains_stmt);
+    }
+
+  remove_unreachable_regions (reachable, contains_stmt);
+  sbitmap_free (reachable);
+  sbitmap_free (contains_stmt);
+  VEC_free (int, heap, label_to_region);
+  if (dump_file)
+    {
+      fprintf (dump_file, "\n\nAfter removal of unreachable regions:\n");
+      dump_eh_tree (dump_file, cfun);
+      fprintf (dump_file, "\n\n");
+    }
+}
+
+/* Pattern match empty EH receiver looking like:
+
+     save_filt.6352_662 = [filter_expr] <<<filter object>>>;
+     save_eptr.6351_663 = [exc_ptr_expr] <<<exception object>>>;
+     <<<exception object>>> = save_eptr.6351_663;
+     <<<filter object>>> = save_filt.6352_662;
+     resx 1
+
+   And various minor variants after DCE or copy propagation.
+ */
+
+/* Returns the RESX region number when BB matches the pattern above,
+   0 otherwise.  */
+static int
+tree_empty_eh_handler_p (basic_block bb)
+{
+  gimple_stmt_iterator gsi;
+  int region;
+  edge_iterator ei;
+  edge e;
+  use_operand_p imm_use;
+  gimple use_stmt;
+  bool found = false;
+
+  /* Walk the block backwards starting from its last statement.  */
+  gsi = gsi_last_bb (bb);
+
+  /* RESX must terminate the block.  */
+  if (gsi_end_p (gsi))
+    return 0;
+  if (gimple_code (gsi_stmt (gsi)) != GIMPLE_RESX)
+    return 0;
+  region = gimple_resx_region (gsi_stmt (gsi));
+
+  /* filter_object set.  If the preceding statement is not an assignment
+     at all, fall through to the bare label check below (DCE variant).  */
+  gsi_prev (&gsi);
+  if (gsi_end_p (gsi))
+    return 0;
+  if (gimple_code (gsi_stmt (gsi)) == GIMPLE_ASSIGN)
+    {
+      tree filter_tmp;
+      tree exc_ptr_tmp;
+
+      if (TREE_CODE (gimple_assign_lhs (gsi_stmt (gsi))) != FILTER_EXPR)
+        return 0;
+      filter_tmp = gimple_assign_rhs1 (gsi_stmt (gsi));
+
+      /* exception object set.  */
+      gsi_prev (&gsi);
+      if (gsi_end_p (gsi))
+        return 0;
+      if (gimple_code (gsi_stmt (gsi)) != GIMPLE_ASSIGN)
+        return 0;
+      if (TREE_CODE (gimple_assign_lhs (gsi_stmt (gsi))) != EXC_PTR_EXPR)
+        return 0;
+      exc_ptr_tmp = gimple_assign_rhs1 (gsi_stmt (gsi));
+
+      /* exc_ptr get.  Skipped when the RHS above already was the
+         EXC_PTR_EXPR itself (copy-propagated variant).  */
+      if (TREE_CODE (exc_ptr_tmp) != EXC_PTR_EXPR)
+        {
+          gsi_prev (&gsi);
+          if (gsi_end_p (gsi))
+            return 0;
+          if (gimple_code (gsi_stmt (gsi)) != GIMPLE_ASSIGN)
+            return 0;
+          if (TREE_CODE (gimple_assign_rhs1 (gsi_stmt (gsi))) != EXC_PTR_EXPR)
+            return 0;
+          if (exc_ptr_tmp != gimple_assign_lhs (gsi_stmt (gsi)))
+            return 0;
+          /* The temporary must have no use besides the store above.  */
+          if (!single_imm_use (exc_ptr_tmp, &imm_use, &use_stmt))
+            return 0;
+        }
+
+      /* filter_object get.  Likewise skipped for the propagated variant.  */
+      if (TREE_CODE (filter_tmp) != FILTER_EXPR)
+        {
+          gsi_prev (&gsi);
+          if (gsi_end_p (gsi))
+            return 0;
+          if (gimple_code (gsi_stmt (gsi)) != GIMPLE_ASSIGN)
+            return 0;
+          if (TREE_CODE (gimple_assign_rhs1 (gsi_stmt (gsi))) != FILTER_EXPR)
+            return 0;
+          if (filter_tmp != gimple_assign_lhs (gsi_stmt (gsi)))
+            return 0;
+          if (!single_imm_use (filter_tmp, &imm_use, &use_stmt))
+            return 0;
+        }
+
+      /* label.  */
+      gsi_prev (&gsi);
+      if (gsi_end_p (gsi))
+        return 0;
+    }
+  if (gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
+    return 0;
+
+  /* Be sure that there is at least one EH region reaching the block directly.
+     After EH edge redirection, it is possible that block is reached by one
+     handler but resumed by a different one.  */
+  FOR_EACH_EDGE (e, ei, bb->preds)
+    if ((e->flags & EDGE_EH))
+      found = true;
+  if (found)
+    return region;
+  return 0;
+}
+
+/* Return true if it is possible to remove basic block BB and propagate
+   through PHIs.
+
+   This means that every PHI in BB has all uses such that they are PHIs
+   of basic blocks reachable through BB, and they appear only in uses
+   reachable by the edge from BB to the block containing the use.
+
+   This is the same as in the merge-phi code, but in a slightly more general
+   setting because BB can have multiple successors.  */
+
+static bool
+all_phis_safe_to_merge (basic_block bb)
+{
+  gimple_stmt_iterator si;
+  bool ok = true;
+
+  for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
+    {
+      gimple phi = gsi_stmt (si);
+      tree result = gimple_phi_result (phi);
+      gimple stmt;
+      use_operand_p imm_use;
+      imm_use_iterator imm_iter;
+
+      /* If the PHI's result is never used, then we can just
+         ignore it.  */
+      if (has_zero_uses (result))
+        continue;
+      /* We can always rebuild virtuals if needed.  */
+      if (!is_gimple_reg (result))
+        continue;
+      FOR_EACH_IMM_USE_STMT (stmt, imm_iter, result)
+        {
+          if (gimple_code (stmt) != GIMPLE_PHI)
+            {
+              if (dump_file && (dump_flags & TDF_DETAILS))
+                fprintf (dump_file,
+                         "PHI result has use in non-PHI statement.\n");
+              ok = false;
+              BREAK_FROM_IMM_USE_STMT (imm_iter);
+            }
+          else
+            FOR_EACH_IMM_USE_ON_STMT (imm_use, imm_iter)
+              {
+                edge e;
+                e = gimple_phi_arg_edge (stmt,
+                                         PHI_ARG_INDEX_FROM_USE (imm_use));
+                /* Every use must flow along an edge leaving BB itself.  */
+                if (e->src != bb)
+                  {
+                    if (dump_file && (dump_flags & TDF_DETAILS))
+                      fprintf (dump_file, "PHI has use in PHI not reached "
+                               "from empty cleanup itself.\n");
+                    ok = false;
+                    break;
+                  }
+              }
+          /* The inner break only leaves the operand loop; terminate the
+             immediate-use walk properly before giving up.  */
+          if (!ok)
+            BREAK_FROM_IMM_USE_STMT (imm_iter);
+        }
+      if (!ok)
+        return false;
+    }
+  return ok;
+}
+
+/* Set whenever the edge manipulation below adds or removes CFG edges, so
+   callers know the dominance info must be recomputed.  */
+static bool dominance_info_invalidated;
+
+/* Information to pass into make_eh_edge_and_update_phi.  */
+
+struct update_info
+{
+  /* BB_TO_REMOVE is the empty cleanup block being deleted; BB is the
+     block whose EH edges are being rebuilt.  */
+  basic_block bb_to_remove, bb;
+  /* The edge whose destination is BB_TO_REMOVE (asserted in
+     make_eh_edge_and_update_phi).  */
+  edge edge_to_remove;
+};
+
+/* DATA points to update-info structure.
+   Like make_eh_edge, create an EH edge from DATA->bb to the basic block
+   containing the handler of REGION.  In addition also update PHI operands
+   by copying operands from DATA->bb_to_remove.  */
+
+static void
+make_eh_edge_and_update_phi (struct eh_region *region, void *data)
+{
+  struct update_info *info = (struct update_info *) data;
+  edge e, e2;
+  tree lab;
+  basic_block src, dst;
+  gimple_stmt_iterator si;
+
+  lab = get_eh_region_tree_label (region);
+
+  src = info->bb;
+  dst = label_to_block (lab);
+
+  e = find_edge (src, dst);
+  if (e)
+    {
+      /* The edge already exists; mark it live via AUX so the sweep in
+         update_eh_edges does not delete it.  */
+      gcc_assert (e->flags & EDGE_EH);
+      e->aux = e;
+      return;
+    }
+  dominance_info_invalidated = true;
+  /* E2 is the edge along which the removed cleanup reached this handler;
+     its PHI arguments supply values for the new edge E.  */
+  e2 = find_edge (info->bb_to_remove, dst);
+  e = make_edge (src, dst, EDGE_EH);
+  e->aux = e;
+  gcc_assert (e2);
+  for (si = gsi_start_phis (dst); !gsi_end_p (si); gsi_next (&si))
+    {
+      gimple phi = gsi_stmt (si);
+      tree use = USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e2));
+      gimple def = (TREE_CODE (use) == SSA_NAME
+                    ? SSA_NAME_DEF_STMT (use) : NULL);
+
+      /* If the value is defined inside the block being removed, look
+         through its defining PHI to the argument flowing in along
+         EDGE_TO_REMOVE.  The assert checks that this look-through
+         bottoms out (except for virtuals, which get rebuilt).  */
+      if (def && gimple_bb (def) == info->bb_to_remove)
+        {
+          use = USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (def,
+                                                         info->edge_to_remove));
+          gcc_assert (info->bb_to_remove == info->edge_to_remove->dest);
+          def = TREE_CODE (use) == SSA_NAME ? SSA_NAME_DEF_STMT (use) : NULL;
+          gcc_assert (!def
+                      || gimple_bb (def) != info->bb_to_remove
+                      || !is_gimple_reg (use));
+        }
+      SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e), use);
+    }
+}
+
+/* Make EH edges corresponding to STMT while updating PHI nodes after removal
+   of empty cleanup BB_TO_REMOVE joined to the BB containing STMT
+   by EDGE_TO_REMOVE.
+
+   Return true if EDGE_TO_REMOVE was really removed.  It might stay reachable
+   when not all EH regions are cleaned up.  */
+
+static bool
+update_eh_edges (gimple stmt, basic_block bb_to_remove, edge edge_to_remove)
+{
+  int region_nr;
+  bool is_resx;
+  bool inlinable = false;
+  struct update_info info;
+  edge_iterator ei;
+  edge e;
+  int probability_sum = 0;
+  bool removed = false;
+
+  info.bb_to_remove = bb_to_remove;
+  info.bb = gimple_bb (stmt);
+  info.edge_to_remove = edge_to_remove;
+
+  /* A RESX names its region directly; anything else is looked up from
+     the statement's EH region table.  */
+  if (gimple_code (stmt) == GIMPLE_RESX)
+    {
+      region_nr = gimple_resx_region (stmt);
+      is_resx = true;
+    }
+  else
+    {
+      region_nr = lookup_stmt_eh_region (stmt);
+      is_resx = false;
+      inlinable = inlinable_call_p (stmt);
+    }
+
+  /* First add new edges as necessary.  Kept edges get marked through
+     their AUX field by make_eh_edge_and_update_phi.  */
+  foreach_reachable_handler (region_nr, is_resx, inlinable,
+                             make_eh_edge_and_update_phi, &info);
+
+  /* And remove the EH edges we didn't mark.  */
+  for (ei = ei_start (info.bb->succs); (e = ei_safe_edge (ei)); )
+    {
+      if ((e->flags & EDGE_EH) && !e->aux)
+        {
+          dominance_info_invalidated = true;
+          if (e == edge_to_remove)
+            removed = true;
+          remove_edge (e);
+        }
+      else
+        {
+          /* Clear the mark so the next sweep over this block starts clean.  */
+          e->aux = NULL;
+          probability_sum += e->probability;
+          ei_next (&ei);
+        }
+    }
+
+  /* Make CFG profile more consistent assuming that exception will resume to
+     the first available EH handler.  In practice this makes little
+     difference, but we get fewer consistency errors in the dumps.  */
+  if (is_resx && EDGE_COUNT (info.bb->succs) && !probability_sum)
+    EDGE_SUCC (info.bb, 0)->probability = REG_BR_PROB_BASE;
+  return removed;
+}
+
+/* Look for basic blocks containing empty exception handler and remove them.
+ This is similar to jump forwarding, just across EH edges. */
+
+static bool
+cleanup_empty_eh (basic_block bb, VEC(int,heap) * label_to_region)
+{
+ int region;
+ gimple_stmt_iterator si;
+ edge_iterator ei;
+
+ /* When handler of EH region winds up to be empty, we can safely
+ remove it. This leads to inner EH regions to be redirected
+ to outer one, if present in function. So we need to rebuild
+ EH edges in all sources. */
+ if ((region = tree_empty_eh_handler_p (bb))
+ && all_phis_safe_to_merge (bb))
+ {
+ edge e;
+ bool found = false, removed_some = false, has_non_eh_preds = false;
+ gimple_stmt_iterator gsi;
+
+ /* Look for all EH regions sharing label of this block.
+ If they are not same as REGION, remove them and replace them
+ by outer region of REGION. Also note if REGION itself is one
+ of them. */
+
+ for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ if (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
+ {
+ int uid = LABEL_DECL_UID (gimple_label_label (gsi_stmt (gsi)));
+ int r = VEC_index (int, label_to_region, uid);
+ int next;
+
+ while (r)
+ {
+ next = get_next_region_sharing_label (r);
+ if (r == region)
+ found = true;
+ else
+ {
+ removed_some = true;
+ remove_eh_region_and_replace_by_outer_of (r, region);
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file, "Empty EH handler %i removed and "
+ "replaced by %i\n", r, region);
+ }
+ r = next;
+ }
+ }
+ else
+ break;
+
+ gcc_assert (found || removed_some);
+ FOR_EACH_EDGE (e, ei, bb->preds)
+ if (!(e->flags & EDGE_EH))
+ has_non_eh_preds = true;
+
+ /* When block is empty EH cleanup, but it is reachable via non-EH code too,
+ we can not remove the region it is resumed via, because doing so will
+ lead to redirection of its RESX edges.
+
+ This case will be handled later after edge forwarding if the EH cleanup
+ is really dead. */
+
+ if (found && !has_non_eh_preds)
+ {
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file, "Empty EH handler %i removed.\n", region);
+ remove_eh_region (region);
+ }
+ else if (!removed_some)
+ return false;
+
+ for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
+ {
+ basic_block src = e->src;
+ if (!(e->flags & EDGE_EH))
+ {
+ ei_next (&ei);
+ continue;
+ }
+ if (stmt_can_throw_internal (last_stmt (src)))
+ {
+ if (!update_eh_edges (last_stmt (src), bb, e))
+ ei_next (&ei);
+ }
+ else
+ remove_edge (e);
+ }
+
+ /* Verify that we eliminated all uses of PHI we are going to remove.
+ If we didn't, rebuild SSA on affected variable (this is allowed only
+ for virtuals). */
+ for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
+ {
+ gimple phi = gsi_stmt (si);
+ tree result = gimple_phi_result (phi);
+ if (!has_zero_uses (result))
+ {
+ use_operand_p use_p;
+ imm_use_iterator iter;
+ gimple stmt;
+
+ FOR_EACH_IMM_USE_STMT (stmt, iter, result)
+ {
+ /* We have use, see if it won't disappear after
+ removing BB. */
+ if (gimple_bb (stmt) == bb)
+ continue;
+ if (gimple_code (stmt) == GIMPLE_PHI)
+ {
+ bool bad = false;
+
+ FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
+ if (gimple_phi_arg_edge (stmt,
+ PHI_ARG_INDEX_FROM_USE (use_p))->src != bb)
+ {
+ bad = true;
+ break;
+ }
+
+ if (!bad)
+ continue;
+ }
+
+ gcc_assert (!is_gimple_reg (result));
+ mark_sym_for_renaming (SSA_NAME_VAR (result));
+ /* As we are going to delete this block we will release all
+ defs which makes the immediate uses on use stmts invalid.
+ Avoid that by replacing all uses with the bare variable
+ and updating the stmts. */
+ FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
+ SET_USE (use_p, SSA_NAME_VAR (result));
+ update_stmt (stmt);
+ }
+ }
+ }
+ if (!ei_safe_edge (ei_start (bb->preds)))
+ delete_basic_block (bb);