X-Git-Url: http://git.sourceforge.jp/view?a=blobdiff_plain;f=gcc%2Ftree-ssa-dce.c;h=ccdf14a17027acc3061bcdf16c29f67593b1770e;hb=ac84c82211756a7508c6f772097dc7beb3359f99;hp=fdfdda5e95f761bc80dffae7909b9aa2555668e0;hpb=b18878555c397f5aab203df348bfa33d114ee242;p=pf3gnuchains%2Fgcc-fork.git

diff --git a/gcc/tree-ssa-dce.c b/gcc/tree-ssa-dce.c
index fdfdda5e95f..ccdf14a1702 100644
--- a/gcc/tree-ssa-dce.c
+++ b/gcc/tree-ssa-dce.c
@@ -1,22 +1,22 @@
 /* Dead code elimination pass for the GNU compiler.
-   Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008
+   Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
    Free Software Foundation, Inc.
    Contributed by Ben Elliston and Andrew MacLeod
    Adapted to use control dependence by Steven Bosscher, SUSE Labs.
- 
+
 This file is part of GCC.
- 
+
 GCC is free software; you can redistribute it and/or modify it
 under the terms of the GNU General Public License as published by the
 Free Software Foundation; either version 3, or (at your option) any
 later version.
- 
+
 GCC is distributed in the hope that it will be useful, but WITHOUT
 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 for more details.
- 
+
 You should have received a copy of the GNU General Public License
 along with GCC; see the file COPYING3.  If not see
 <http://www.gnu.org/licenses/>.  */

@@ -47,17 +47,11 @@ along with GCC; see the file COPYING3.  If not see
 #include "system.h"
 #include "coretypes.h"
 #include "tm.h"
-#include "ggc.h"
-
-/* These RTL headers are needed for basic-block.h.  */
-#include "rtl.h"
-#include "tm_p.h"
-#include "hard-reg-set.h"
-#include "obstack.h"
-#include "basic-block.h"
 #include "tree.h"
-#include "diagnostic.h"
+#include "tree-pretty-print.h"
+#include "gimple-pretty-print.h"
+#include "basic-block.h"
 #include "tree-flow.h"
 #include "gimple.h"
 #include "tree-dump.h"
@@ -83,8 +77,8 @@ static VEC(gimple,heap) *worklist;
    as necessary.  */
 static sbitmap processed;
 
-/* Vector indicating that last_stmt if a basic block has already been
-   marked as necessary.  */
+/* Vector indicating that the last statement of a basic block has already
+   been marked as necessary.  */
 static sbitmap last_stmt_necessary;
 
 /* Vector indicating that BB contains statements that are live.  */
@@ -203,6 +197,7 @@ find_all_control_dependences (struct edge_list *el)
 
 /* If STMT is not already marked necessary, mark it, and add it to the
    worklist if ADD_TO_WORKLIST is true.  */
+
 static inline void
 mark_stmt_necessary (gimple stmt, bool add_to_worklist)
 {
@@ -221,7 +216,7 @@ mark_stmt_necessary (gimple stmt, bool add_to_worklist)
   gimple_set_plf (stmt, STMT_NECESSARY, true);
   if (add_to_worklist)
     VEC_safe_push (gimple, heap, worklist, stmt);
-  if (bb_contains_live_stmts)
+  if (bb_contains_live_stmts && !is_gimple_debug (stmt))
     SET_BIT (bb_contains_live_stmts, gimple_bb (stmt)->index);
 }
 
@@ -276,11 +271,9 @@ mark_operand_necessary (tree op)
 static void
 mark_stmt_if_obviously_necessary (gimple stmt, bool aggressive)
 {
-  tree lhs = NULL_TREE;
   /* With non-call exceptions, we have to assume that all statements could
      throw.  If a statement may throw, it is inherently necessary.  */
-  if (flag_non_call_exceptions
-      && stmt_could_throw_p (stmt))
+  if (cfun->can_throw_non_call_exceptions && stmt_could_throw_p (stmt))
     {
       mark_stmt_necessary (stmt, true);
       return;
     }
@@ -306,32 +299,43 @@ mark_stmt_if_obviously_necessary (gimple stmt, bool aggressive)
       return;
 
     case GIMPLE_CALL:
-      /* Most, but not all function calls are required.  Function calls that
-	 produce no result and have no side effects (i.e. const pure
-	 functions) are unnecessary.  */
-      if (gimple_has_side_effects (stmt))
-	{
-	  mark_stmt_necessary (stmt, true);
-	  return;
-	}
-      if (!gimple_call_lhs (stmt))
-	return;
-      lhs = gimple_call_lhs (stmt);
-      /* Fall through */
+      {
+	tree callee = gimple_call_fndecl (stmt);
+	if (callee != NULL_TREE
+	    && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
+	  switch (DECL_FUNCTION_CODE (callee))
+	    {
+	    case BUILT_IN_MALLOC:
+	    case BUILT_IN_CALLOC:
+	    case BUILT_IN_ALLOCA:
+	    case BUILT_IN_ALLOCA_WITH_ALIGN:
+	      return;
 
-    case GIMPLE_ASSIGN:
-      if (!lhs)
-	lhs = gimple_assign_lhs (stmt);
-      /* These values are mildly magic bits of the EH runtime.  We can't
-	 see the entire lifetime of these values until landing pads are
-	 generated.  */
-      if (TREE_CODE (lhs) == EXC_PTR_EXPR
-	  || TREE_CODE (lhs) == FILTER_EXPR)
-	{
-	  mark_stmt_necessary (stmt, true);
+	    default:;
+	    }
+	/* Most, but not all function calls are required.  Function calls that
+	   produce no result and have no side effects (i.e. const pure
+	   functions) are unnecessary.  */
+	if (gimple_has_side_effects (stmt))
+	  {
+	    mark_stmt_necessary (stmt, true);
+	    return;
+	  }
+	if (!gimple_call_lhs (stmt))
 	  return;
-	}
-      break;
+	break;
+      }
+
+    case GIMPLE_DEBUG:
+      /* Debug temps without a value are not useful.  ??? If we could
+	 easily locate the debug temp bind stmt for a use thereof,
+	 we could refrain from marking all debug temps here, and
+	 mark them only if they're used.  */
+      if (!gimple_debug_bind_p (stmt)
+	  || gimple_debug_bind_has_value_p (stmt)
+	  || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
+	mark_stmt_necessary (stmt, false);
+      return;
 
     case GIMPLE_GOTO:
      gcc_assert (!simple_goto_p (stmt));
      mark_stmt_necessary (stmt, true);
      break;
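
[Editor's note: the GIMPLE_CALL hunk above exempts the allocation builtins
from being marked "obviously necessary", so an allocation whose result is
never used can now be deleted like any other dead statement.  A minimal,
hedged C sketch of the pattern this enables; the function name is invented
for illustration:

    void drop_unused_allocation (void)
    {
      /* The result is never read, so nothing marks this call necessary
         and cddce can delete it outright.  */
      void *p = __builtin_malloc (16);
    }
]
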
@@ -347,6 +351,12 @@ mark_stmt_if_obviously_necessary (gimple stmt, bool aggressive)
      mark_stmt_necessary (stmt, true);
      break;
 
+    case GIMPLE_ASSIGN:
+      if (TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME
+	  && TREE_CLOBBER_P (gimple_assign_rhs1 (stmt)))
+	return;
+      break;
+
     default:
       break;
     }
@@ -370,14 +380,34 @@ mark_stmt_if_obviously_necessary (gimple stmt, bool aggressive)
 }
 
 
-/* Make corresponding control dependent edges necessary.  We only
-   have to do this once for each basic block, so we clear the bitmap
-   after we're done.  */
+/* Mark the last statement of BB as necessary.  */
+
 static void
-mark_control_dependent_edges_necessary (basic_block bb, struct edge_list *el)
+mark_last_stmt_necessary (basic_block bb)
+{
+  gimple stmt = last_stmt (bb);
+
+  SET_BIT (last_stmt_necessary, bb->index);
+  SET_BIT (bb_contains_live_stmts, bb->index);
+
+  /* We actually mark the statement only if it is a control statement.  */
+  if (stmt && is_ctrl_stmt (stmt))
+    mark_stmt_necessary (stmt, true);
+}
+
+
+/* Mark control dependent edges of BB as necessary.  We have to do this only
+   once for each basic block so we set the appropriate bit after we're done.
+
+   When IGNORE_SELF is true, ignore BB in the list of control dependences.  */
+
+static void
+mark_control_dependent_edges_necessary (basic_block bb, struct edge_list *el,
+					bool ignore_self)
 {
   bitmap_iterator bi;
   unsigned edge_number;
+  bool skipped = false;
 
   gcc_assert (bb != EXIT_BLOCK_PTR);
 
@@ -386,18 +416,20 @@ mark_control_dependent_edges_necessary (basic_block bb, struct edge_list *el)
 
   EXECUTE_IF_CONTROL_DEPENDENT (bi, bb->index, edge_number)
     {
-      gimple stmt;
       basic_block cd_bb = INDEX_EDGE_PRED_BB (el, edge_number);
 
-      if (TEST_BIT (last_stmt_necessary, cd_bb->index))
-	continue;
-      SET_BIT (last_stmt_necessary, cd_bb->index);
-      SET_BIT (bb_contains_live_stmts, cd_bb->index);
+      if (ignore_self && cd_bb == bb)
+	{
+	  skipped = true;
+	  continue;
+	}
 
-      stmt = last_stmt (cd_bb);
-      if (stmt && is_ctrl_stmt (stmt))
-	mark_stmt_necessary (stmt, true);
+      if (!TEST_BIT (last_stmt_necessary, cd_bb->index))
+	mark_last_stmt_necessary (cd_bb);
     }
+
+  if (!skipped)
+    SET_BIT (visited_control_parents, bb->index);
 }
 
 
@@ -415,6 +447,7 @@ find_obviously_necessary_stmts (struct edge_list *el)
   gimple_stmt_iterator gsi;
   edge e;
   gimple phi, stmt;
+  int flags;
 
   FOR_EACH_BB (bb)
     {
@@ -436,9 +469,8 @@ find_obviously_necessary_stmts (struct edge_list *el)
 
   /* Pure and const functions are finite and thus have no infinite loops in
      them.  */
-  if ((TREE_READONLY (current_function_decl)
-       || DECL_PURE_P (current_function_decl))
-      && !DECL_LOOPING_CONST_OR_PURE_P (current_function_decl))
+  flags = flags_from_decl_or_type (current_function_decl);
+  if ((flags & (ECF_CONST|ECF_PURE)) && !(flags & ECF_LOOPING_CONST_OR_PURE))
    return;
 
  /* Prevent the empty possibly infinite loops from being removed.  */
@@ -458,7 +490,7 @@ find_obviously_necessary_stmts (struct edge_list *el)
	      if (dump_file)
	        fprintf (dump_file, "Marking back edge of irreducible loop %i->%i\n",
		         e->src->index, e->dest->index);
-	      mark_control_dependent_edges_necessary (e->dest, el);
+	      mark_control_dependent_edges_necessary (e->dest, el, false);
	    }
	}
 
@@ -467,7 +499,7 @@ find_obviously_necessary_stmts (struct edge_list *el)
	  {
	    if (dump_file)
	      fprintf (dump_file, "can not prove finiteness of loop %i\n", loop->num);
-	    mark_control_dependent_edges_necessary (loop->latch, el);
+	    mark_control_dependent_edges_necessary (loop->latch, el, false);
	  }
      scev_finalize ();
    }
 }
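
[Editor's note: the hunks above derive const/pure-ness from
flags_from_decl_or_type and keep every loop whose finiteness cannot be
proved.  A hedged C sketch of a loop the pass must preserve even though its
body computes nothing that is used afterwards; the names are invented:

    void spin_maybe_forever (unsigned k)
    {
      /* SCEV cannot bound this Collatz-style iteration, so the latch's
         control dependences are marked necessary and the loop survives
         aggressive DCE; deleting it could change program behaviour.  */
      while (k > 1)
        k = (k & 1) ? 3 * k + 1 : k / 2;
    }
]
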
@@ -479,8 +511,12 @@ find_obviously_necessary_stmts (struct edge_list *el)
 static bool
 ref_may_be_aliased (tree ref)
 {
+  gcc_assert (TREE_CODE (ref) != WITH_SIZE_EXPR);
   while (handled_component_p (ref))
     ref = TREE_OPERAND (ref, 0);
+  if (TREE_CODE (ref) == MEM_REF
+      && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
+    ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
   return !(DECL_P (ref) && !may_be_aliased (ref));
 }
 
@@ -488,17 +524,18 @@ ref_may_be_aliased (tree ref)
 static bitmap visited = NULL;
 static unsigned int longest_chain = 0;
 static unsigned int total_chain = 0;
+static unsigned int nr_walks = 0;
 static bool chain_ovfl = false;
 
 /* Worker for the walker that marks reaching definitions of REF,
    which is based on a non-aliased decl, necessary.  It returns
    true whenever the defining statement of the current VDEF is
    a kill for REF, as no dominating may-defs are necessary for REF
-   anymore.  DATA points to cached get_ref_base_and_extent data for REF.  */
+   anymore.  DATA points to the basic-block that contains the
+   stmt that refers to REF.  */
 
 static bool
-mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef,
-					void *data ATTRIBUTE_UNUSED)
+mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
 {
   gimple def_stmt = SSA_NAME_DEF_STMT (vdef);
 
@@ -507,7 +544,14 @@ mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef,
 
   /* If the stmt lhs kills ref, then we can stop walking.  */
   if (gimple_has_lhs (def_stmt)
-      && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME)
+      && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
+      /* The assignment is not necessarily carried out if it can throw
+	 and we can catch it in the current function where we could inspect
+	 the previous value.
+	 ??? We only need to care about the RHS throwing.  For aggregate
+	 assignments or similar calls and non-call exceptions the LHS
+	 might throw as well.  */
+      && !stmt_can_throw_internal (def_stmt))
     {
       tree base, lhs = gimple_get_lhs (def_stmt);
       HOST_WIDE_INT size, offset, max_size;
@@ -528,6 +572,12 @@ mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef,
	}
      /* Or they need to be exactly the same.  */
      else if (ref->ref
+	       /* Make sure there is no induction variable involved
+		  in the references (gcc.c-torture/execute/pr42142.c).
+		  The simplest way is to check if the kill dominates
+		  the use.  */
+	       && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
+				  gimple_bb (def_stmt))
	       && operand_equal_p (ref->ref, lhs, 0))
	return true;
    }
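
[Editor's note: the two hunks above let the alias walker stop at a "kill" --
a later store that completely covers the reference -- now guarded against
throwing statements and non-dominating kills.  A minimal C sketch of the
store-kills-store case, with an invented type and names:

    struct box { int a, b; };

    void overwrite_whole (struct box *out)
    {
      struct box t;
      t.a = 1;                    /* partial stores: killed by the      */
      t.b = 2;                    /* full-struct store below, so they   */
      t = (struct box) { 3, 4 };  /* are never marked necessary and go  */
      *out = t;                   /* away as dead code.                 */
    }
]
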
@@ -546,10 +596,11 @@ mark_aliased_reaching_defs_necessary (gimple stmt, tree ref)
   ao_ref_init (&refd, ref);
   chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
			      mark_aliased_reaching_defs_necessary_1,
-			      NULL, NULL);
+			      gimple_bb (stmt), NULL);
   if (chain > longest_chain)
     longest_chain = chain;
   total_chain += chain;
+  nr_walks++;
 }
 
 /* Worker for the walker that marks reaching definitions of REF, which
@@ -583,6 +634,26 @@ mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
	  return false;
	}
 
+      /* We want to skip statements that do not constitute stores but have
+	 a virtual definition.  */
+      if (is_gimple_call (def_stmt))
+	{
+	  tree callee = gimple_call_fndecl (def_stmt);
+	  if (callee != NULL_TREE
+	      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
+	    switch (DECL_FUNCTION_CODE (callee))
+	      {
+	      case BUILT_IN_MALLOC:
+	      case BUILT_IN_CALLOC:
+	      case BUILT_IN_ALLOCA:
+	      case BUILT_IN_ALLOCA_WITH_ALIGN:
+	      case BUILT_IN_FREE:
+		return false;
+
+	      default:;
+	      }
+	}
+
   mark_operand_necessary (vdef);
 
   return false;
@@ -611,7 +682,7 @@ degenerate_phi_p (gimple phi)
 /* Propagate necessity using the operands of necessary statements.
    Process the uses on each statement in the worklist, and add all
    feeding statements which contribute to the calculation of this
-   value to the worklist.
+   value to the worklist.
 
    In conservative mode, EL is NULL.  */
@@ -619,7 +690,7 @@
 static void
 propagate_necessity (struct edge_list *el)
 {
   gimple stmt;
-  bool aggressive = (el ? true : false);
+  bool aggressive = (el ? true : false);
 
   if (dump_file && (dump_flags & TDF_DETAILS))
     fprintf (dump_file, "\nProcessing worklist:\n");
@@ -638,16 +709,13 @@ propagate_necessity (struct edge_list *el)
 
       if (aggressive)
	{
-	  /* Mark the last statements of the basic blocks that the block
-	     containing STMT is control dependent on, but only if we haven't
+	  /* Mark the last statement of the basic blocks on which the block
+	     containing STMT is control dependent, but only if we haven't
	     already done so.  */
	  basic_block bb = gimple_bb (stmt);
	  if (bb != ENTRY_BLOCK_PTR
-	      && ! TEST_BIT (visited_control_parents, bb->index))
-	    {
-	      SET_BIT (visited_control_parents, bb->index);
-	      mark_control_dependent_edges_necessary (bb, el);
-	    }
+	      && !TEST_BIT (visited_control_parents, bb->index))
+	    mark_control_dependent_edges_necessary (bb, el, false);
	}
 
      if (gimple_code (stmt) == GIMPLE_PHI
@@ -670,28 +738,121 @@ propagate_necessity (struct edge_list *el)
	      mark_operand_necessary (arg);
	    }
 
+	  /* For PHI operands it matters from where the control flow arrives
+	     to the BB.  Consider the following example:
+
+	     a=exp1;
+	     b=exp2;
+	     if (test)
+		;
+	     else
+		;
+	     c=PHI(a,b)
+
+	     We need to mark the control dependence of the empty basic blocks,
+	     since they contain computations of PHI operands.
+
+	     Doing so is too restrictive in the case the predecessor block is
+	     in the loop.  Consider:
+
+	      if (b)
+		{
+		  int i;
+		  for (i = 0; i<1000; ++i)
+		    ;
+		  j = 0;
+		}
+	      return j;
+
+	     There is a PHI for J in the BB containing the return statement.
+	     In this case the control dependence of the predecessor block
+	     (that is within the empty loop) also contains the block
+	     determining the number of iterations of the loop, which would
+	     prevent removing the empty loop in this case.
+
+	     This scenario can be avoided by splitting critical edges.
+	     To save the critical edge splitting pass we identify how the
+	     control dependence would look if the edge were split.
+
+	     Consider the modified CFG created from the current CFG by
+	     splitting edge B->C.  In the postdominance tree of the modified
+	     CFG, C' is always a child of C.  There are two cases how the
+	     children of C' can look:
+
+	     1) C' is a leaf
+
+		In this case the only basic block C' is control dependent on is B.
+
+	     2) C' has a single child that is B
+
+		In this case the control dependence of C' is the same as the
+		control dependence of B in the original CFG except for block
+		B itself (since C' postdominates B in the modified CFG).
+
+	     Now how to decide what case happens?  There are two basic options:
+
+	     a) C postdominates B.  Then C immediately postdominates B and
+		case 2 happens iff there is no other way from B to C except
+		the edge B->C.
+
+		There is another way from B to C iff there is a successor of B
+		that is not postdominated by B.  Testing this condition is
+		somewhat expensive, because we need to iterate over all
+		successors of B.  We are safe to assume that this does not
+		happen: we will mark B as needed when processing the other
+		path from B to C that is control dependent on B, and marking
+		control dependencies of B itself is harmless because they will
+		be processed anyway after processing the control statement
+		in B.
+
+	     b) C does not postdominate B.  Always case 1 happens since there
+		is a path from C to exit that does not go through B and thus
+		also not through C'.  */
+
	  if (aggressive && !degenerate_phi_p (stmt))
	    {
	      for (k = 0; k < gimple_phi_num_args (stmt); k++)
		{
		  basic_block arg_bb = gimple_phi_arg_edge (stmt, k)->src;
-		  if (arg_bb != ENTRY_BLOCK_PTR
-		      && ! TEST_BIT (visited_control_parents, arg_bb->index))
+
+		  if (gimple_bb (stmt)
+		      != get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
		    {
-		      SET_BIT (visited_control_parents, arg_bb->index);
-		      mark_control_dependent_edges_necessary (arg_bb, el);
+		      if (!TEST_BIT (last_stmt_necessary, arg_bb->index))
+			mark_last_stmt_necessary (arg_bb);
		    }
+		  else if (arg_bb != ENTRY_BLOCK_PTR
+			   && !TEST_BIT (visited_control_parents,
+					 arg_bb->index))
+		    mark_control_dependent_edges_necessary (arg_bb, el, true);
		}
	    }
	}
      else
	{
	  /* Propagate through the operands.  Examine all the USE, VUSE and
-	     VDEF operands in this statement.  Mark all the statements
+	     VDEF operands in this statement.  Mark all the statements
	     which feed this statement's uses as necessary.  */
	  ssa_op_iter iter;
	  tree use;
 
+	  /* If this is a call to free which is directly fed by an
+	     allocation function do not mark that necessary through
+	     processing the argument.  */
+	  if (gimple_call_builtin_p (stmt, BUILT_IN_FREE))
+	    {
+	      tree ptr = gimple_call_arg (stmt, 0);
+	      gimple def_stmt;
+	      tree def_callee;
+	      /* If the pointer we free is defined by an allocation
+		 function do not add the call to the worklist.  */
+	      if (TREE_CODE (ptr) == SSA_NAME
+		  && is_gimple_call (def_stmt = SSA_NAME_DEF_STMT (ptr))
+		  && (def_callee = gimple_call_fndecl (def_stmt))
+		  && DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL
+		  && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC
+		      || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC))
+		continue;
+	    }
+
	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
	    mark_operand_necessary (use);
 
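[Editor's note: the hunk above keeps a free from dragging its argument's
defining allocation into the live set; together with the matching code in
eliminate_unnecessary_stmts further down, this allows removing a malloc/free
(or calloc/free) pair whose memory is never otherwise touched.  A hedged C
sketch of the target pattern; names are invented:

    void allocate_and_discard (void)
    {
      char *buf = __builtin_malloc (1024);
      /* Nothing reads or writes *buf, so neither call is necessary and
         the whole pair can disappear.  */
      __builtin_free (buf);
    }
]
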
@@ -731,8 +892,17 @@ propagate_necessity (struct edge_list *el)
	      if (callee != NULL_TREE
		  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
		  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET
+		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET_CHK
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC
-		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE))
+		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC
+		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE
+		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END
+		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
+		      || (DECL_FUNCTION_CODE (callee)
+			  == BUILT_IN_ALLOCA_WITH_ALIGN)
+		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE
+		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE
+		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_ASSUME_ALIGNED))
		continue;
 
	      /* Calls implicitly load from memory, their arguments
@@ -744,6 +914,8 @@ propagate_necessity (struct edge_list *el)
		  if (TREE_CODE (arg) == SSA_NAME
		      || is_gimple_min_invariant (arg))
		    continue;
+		  if (TREE_CODE (arg) == WITH_SIZE_EXPR)
+		    arg = TREE_OPERAND (arg, 0);
		  if (!ref_may_be_aliased (arg))
		    mark_aliased_reaching_defs_necessary (stmt, arg);
		}
@@ -751,26 +923,26 @@ propagate_necessity (struct edge_list *el)
	  else if (gimple_assign_single_p (stmt))
	    {
	      tree rhs;
-	      bool rhs_aliased = false;
	      /* If this is a load mark things necessary.  */
	      rhs = gimple_assign_rhs1 (stmt);
	      if (TREE_CODE (rhs) != SSA_NAME
-		  && !is_gimple_min_invariant (rhs))
+		  && !is_gimple_min_invariant (rhs)
+		  && TREE_CODE (rhs) != CONSTRUCTOR)
		{
		  if (!ref_may_be_aliased (rhs))
		    mark_aliased_reaching_defs_necessary (stmt, rhs);
		  else
-		    rhs_aliased = true;
+		    mark_all_reaching_defs_necessary (stmt);
		}
-	      if (rhs_aliased)
-		mark_all_reaching_defs_necessary (stmt);
	    }
	  else if (gimple_code (stmt) == GIMPLE_RETURN)
	    {
	      tree rhs = gimple_return_retval (stmt);
	      /* A return statement may perform a load.  */
-	      if (TREE_CODE (rhs) != SSA_NAME
-		  && !is_gimple_min_invariant (rhs))
+	      if (rhs
+		  && TREE_CODE (rhs) != SSA_NAME
+		  && !is_gimple_min_invariant (rhs)
+		  && TREE_CODE (rhs) != CONSTRUCTOR)
		{
		  if (!ref_may_be_aliased (rhs))
		    mark_aliased_reaching_defs_necessary (stmt, rhs);
@@ -788,6 +960,7 @@ propagate_necessity (struct edge_list *el)
		  tree op = TREE_VALUE (gimple_asm_input_op (stmt, i));
		  if (TREE_CODE (op) != SSA_NAME
		      && !is_gimple_min_invariant (op)
+		      && TREE_CODE (op) != CONSTRUCTOR
		      && !ref_may_be_aliased (op))
		    mark_aliased_reaching_defs_necessary (stmt, op);
		}
	    }
	  else
	    gcc_unreachable ();
 
	  /* If we over-used our alias oracle budget drop to simple
-	     mode.  The cost metric allows quadratic behavior up to
-	     a constant maximal chain and after that falls back to
+	     mode.  The cost metric allows quadratic behavior
+	     (number of uses times number of may-defs queries) up to
+	     a constant maximal number of queries and after that falls back to
	     super-linear complexity.  */
-	  if (longest_chain > 256
-	      && total_chain > 256 * longest_chain)
+	  if (/* Constant but quadratic for small functions.  */
+	      total_chain > 128 * 128
+	      /* Linear in the number of may-defs.  */
+	      && total_chain > 32 * longest_chain
+	      /* Linear in the number of uses.  */
+	      && total_chain > nr_walks * 32)
	    {
	      chain_ovfl = true;
	      if (visited)
@@ -810,36 +988,47 @@ propagate_necessity (struct edge_list *el)
     }
 }
 
-/* Replace all uses of result of PHI by underlying variable and mark it
+/* Replace all uses of NAME by underlying variable and mark it
    for renaming.  */
 
-static void
-mark_virtual_phi_result_for_renaming (gimple phi)
+void
+mark_virtual_operand_for_renaming (tree name)
 {
   bool used = false;
   imm_use_iterator iter;
   use_operand_p use_p;
   gimple stmt;
-  if (dump_file && (dump_flags & TDF_DETAILS))
-    {
-      fprintf (dump_file, "Marking result for renaming : ");
-      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
-      fprintf (dump_file, "\n");
-    }
-  FOR_EACH_IMM_USE_STMT (stmt, iter, gimple_phi_result (phi))
+  tree name_var;
+
+  name_var = SSA_NAME_VAR (name);
+  FOR_EACH_IMM_USE_STMT (stmt, iter, name)
    {
-      if (gimple_code (stmt) != GIMPLE_PHI
-	  && !gimple_plf (stmt, STMT_NECESSARY))
-	continue;
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
-	SET_USE (use_p, SSA_NAME_VAR (gimple_phi_result (phi)));
+	SET_USE (use_p, name_var);
      update_stmt (stmt);
      used = true;
    }
  if (used)
-    mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi)));
+    mark_sym_for_renaming (name_var);
 }
 
+/* Replace all uses of result of PHI by underlying variable and mark it
+   for renaming.  */
+
+void
+mark_virtual_phi_result_for_renaming (gimple phi)
+{
+  if (dump_file && (dump_flags & TDF_DETAILS))
+    {
+      fprintf (dump_file, "Marking result for renaming : ");
+      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
+      fprintf (dump_file, "\n");
+    }
+
+  mark_virtual_operand_for_renaming (gimple_phi_result (phi));
+}
+
+
 /* Remove dead PHI nodes from block BB.  */
 
 static bool
@@ -873,7 +1062,8 @@ remove_dead_phis (basic_block bb)
	      FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, vuse);
-	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef))
+	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
+		  && TREE_CODE (vuse) == SSA_NAME)
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
	    }
	  else
@@ -900,27 +1090,6 @@ remove_dead_phis (basic_block bb)
   return something_changed;
 }
 
-/* Find first live post dominator of BB.  */
-
-static basic_block
-get_live_post_dom (basic_block bb)
-{
-  basic_block post_dom_bb;
-
-
-  /* The post dominance info has to be up-to-date.  */
-  gcc_assert (dom_info_state (CDI_POST_DOMINATORS) == DOM_OK);
-
-  /* Get the immediate post dominator of bb.  */
-  post_dom_bb = get_immediate_dominator (CDI_POST_DOMINATORS, bb);
-  /* And look for first live one.  */
-  while (post_dom_bb != EXIT_BLOCK_PTR
-	 && !TEST_BIT (bb_contains_live_stmts, post_dom_bb->index))
-    post_dom_bb = get_immediate_dominator (CDI_POST_DOMINATORS, post_dom_bb);
-
-  return post_dom_bb;
-}
-
 /* Forward edge E to respective POST_DOM_BB and update PHIs.  */
 
 static edge
 forward_edge_to_pdom (edge e, basic_block post_dom_bb)
@@ -941,47 +1110,40 @@ forward_edge_to_pdom (edge e, basic_block post_dom_bb)
   if (e2 != e)
     return e2;
 
-  if (phi_nodes (post_dom_bb))
+  if (!gimple_seq_empty_p (phi_nodes (post_dom_bb)))
     {
       /* We are sure that for every live PHI we are seeing control dependent BB.
-         This means that we can look up the end of control dependent path leading
-         to the PHI itself.  */
+         This means that we can pick any edge to duplicate PHI args from.  */
       FOR_EACH_EDGE (e2, ei, post_dom_bb->preds)
-	if (e2 != e && dominated_by_p (CDI_POST_DOMINATORS, e->src, e2->src))
+	if (e2 != e)
	  break;
 
      for (gsi = gsi_start_phis (post_dom_bb); !gsi_end_p (gsi);)
	{
	  gimple phi = gsi_stmt (gsi);
	  tree op;
+	  source_location locus;
 
-	  /* Dead PHI do not imply control dependency.  */
-	  if (!gimple_plf (phi, STMT_NECESSARY)
-	      && is_gimple_reg (gimple_phi_result (phi)))
+	  /* PHIs for virtuals have no control dependency relation on them.
+	     We are lost here and must force renaming of the symbol.  */
+	  if (!is_gimple_reg (gimple_phi_result (phi)))
	    {
-	      gsi_next (&gsi);
+	      mark_virtual_phi_result_for_renaming (phi);
+	      remove_phi_node (&gsi, true);
	      continue;
	    }
-	  if (gimple_phi_arg_def (phi, e->dest_idx))
+
+	  /* Dead PHIs do not imply a control dependency.  */
+	  if (!gimple_plf (phi, STMT_NECESSARY))
	    {
	      gsi_next (&gsi);
	      continue;
	    }
 
-	  /* We didn't find edge to update.  This can happen for PHIs on virtuals
-	     since there is no control dependency relation on them.  We are lost
-	     here and must force renaming of the symbol.  */
-	  if (!is_gimple_reg (gimple_phi_result (phi)))
-	    {
-	      mark_virtual_phi_result_for_renaming (phi);
-	      remove_phi_node (&gsi, true);
-	      continue;
-	    }
-	  if (!e2)
-	    op = gimple_phi_arg_def (phi, e->dest_idx == 0 ? 1 : 0);
-	  else
-	    op = gimple_phi_arg_def (phi, e2->dest_idx);
-	  add_phi_arg (phi, op, e);
-	  gcc_assert (e2 || degenerate_phi_p (phi));
+	  op = gimple_phi_arg_def (phi, e2->dest_idx);
+	  locus = gimple_phi_arg_location (phi, e2->dest_idx);
+	  add_phi_arg (phi, op, e, locus);
+	  /* The resulting PHI, if not dead, can only be degenerate.  */
+	  gcc_assert (degenerate_phi_p (phi));
	  gsi_next (&gsi);
	}
    }
@@ -1017,7 +1179,7 @@ remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
   edge e, e2;
   edge_iterator ei;
 
-  post_dom_bb = get_live_post_dom (bb);
+  post_dom_bb = get_immediate_dominator (CDI_POST_DOMINATORS, bb);
 
   e = find_edge (bb, post_dom_bb);
 
@@ -1053,12 +1215,31 @@ remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
	  ei_next (&ei);
	}
 
+  /* If this is a store into a variable that is being optimized away,
+     add a debug bind stmt if possible.  */
+  if (MAY_HAVE_DEBUG_STMTS
+      && gimple_assign_single_p (stmt)
+      && is_gimple_val (gimple_assign_rhs1 (stmt)))
+    {
+      tree lhs = gimple_assign_lhs (stmt);
+      if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
+	  && !DECL_IGNORED_P (lhs)
+	  && is_gimple_reg_type (TREE_TYPE (lhs))
+	  && !is_global_var (lhs)
+	  && !DECL_HAS_VALUE_EXPR_P (lhs))
+	{
+	  tree rhs = gimple_assign_rhs1 (stmt);
+	  gimple note
+	    = gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
+	  gsi_insert_after (i, note, GSI_SAME_STMT);
+	}
+    }
+
   unlink_stmt_vdef (stmt);
-  gsi_remove (i, true);
-  release_defs (stmt);
+  gsi_remove (i, true);
+  release_defs (stmt);
 }
 
-
 /* Eliminate unnecessary statements.  Any instruction not marked as necessary
    contributes nothing to the program, and can be deleted.  */
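
[Editor's note: the debug-bind hunk above tries to keep the value of a
deleted store visible to the debugger.  A hedged source-level sketch,
glossing over the exact SSA conditions; names are invented:

    void observe_in_debugger (int x)
    {
      int v;
      v = x;   /* dead store: removed, but with -g the pass can leave
                  "# DEBUG v => x" behind so a debugger still prints v.  */
    }
]
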
@@ -1067,77 +1248,142 @@ eliminate_unnecessary_stmts (void)
 {
   bool something_changed = false;
   basic_block bb;
-  gimple_stmt_iterator gsi;
+  gimple_stmt_iterator gsi, psi;
   gimple stmt;
   tree call;
+  VEC (basic_block, heap) *h;
 
   if (dump_file && (dump_flags & TDF_DETAILS))
     fprintf (dump_file, "\nEliminating unnecessary statements:\n");
 
   clear_special_calls ();
 
-  FOR_EACH_BB (bb)
+  /* Walking basic blocks and statements in reverse order avoids
+     releasing SSA names before any other DEFs that refer to them are
+     released.  This helps avoid loss of debug information, as we get
+     a chance to propagate all RHSs of removed SSAs into debug uses,
+     rather than only the latest ones.  E.g., consider:
+
+     x_3 = y_1 + z_2;
+     a_5 = x_3 - b_4;
+     # DEBUG a => a_5
+
+     If we were to release x_3 before a_5, when we reached a_5 and
+     tried to substitute it into the debug stmt, we'd see x_3 there,
+     but x_3's DEF, type, etc would have already been disconnected.
+     By going backwards, the debug stmt first changes to:
+
+     # DEBUG a => x_3 - b_4
+
+     and then to:
+
+     # DEBUG a => y_1 + z_2 - b_4
+
+     as desired.  */
+  gcc_assert (dom_info_available_p (CDI_DOMINATORS));
+  h = get_all_dominated_blocks (CDI_DOMINATORS, single_succ (ENTRY_BLOCK_PTR));
+
+  while (VEC_length (basic_block, h))
    {
+      bb = VEC_pop (basic_block, h);
+
      /* Remove dead statements.  */
-      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
+      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
	{
	  stmt = gsi_stmt (gsi);
 
+	  psi = gsi;
+	  gsi_prev (&psi);
+
	  stats.total++;
 
+	  /* We can mark a call to free as not necessary if the
+	     defining statement of its argument is an allocation
+	     function and that is not necessary itself.  */
+	  if (gimple_call_builtin_p (stmt, BUILT_IN_FREE))
+	    {
+	      tree ptr = gimple_call_arg (stmt, 0);
+	      tree callee2;
+	      gimple def_stmt;
+	      if (TREE_CODE (ptr) != SSA_NAME)
+		continue;
+	      def_stmt = SSA_NAME_DEF_STMT (ptr);
+	      if (!is_gimple_call (def_stmt)
+		  || gimple_plf (def_stmt, STMT_NECESSARY))
+		continue;
+	      callee2 = gimple_call_fndecl (def_stmt);
+	      if (callee2 == NULL_TREE
+		  || DECL_BUILT_IN_CLASS (callee2) != BUILT_IN_NORMAL
+		  || (DECL_FUNCTION_CODE (callee2) != BUILT_IN_MALLOC
+		      && DECL_FUNCTION_CODE (callee2) != BUILT_IN_CALLOC))
+		continue;
+	      gimple_set_plf (stmt, STMT_NECESSARY, false);
+	    }
+
	  /* If GSI is not necessary then remove it.  */
	  if (!gimple_plf (stmt, STMT_NECESSARY))
	    {
+	      if (!is_gimple_debug (stmt))
+		something_changed = true;
	      remove_dead_stmt (&gsi, bb);
-	      something_changed = true;
	    }
	  else if (is_gimple_call (stmt))
	    {
-	      call = gimple_call_fndecl (stmt);
-	      if (call)
+	      tree name = gimple_call_lhs (stmt);
+
+	      notice_special_calls (stmt);
+
+	      /* When LHS of var = call (); is dead, simplify it into
+		 call (); saving one operand.  */
+	      if (name
+		  && TREE_CODE (name) == SSA_NAME
+		  && !TEST_BIT (processed, SSA_NAME_VERSION (name))
+		  /* Avoid doing so for allocation calls which we
+		     did not mark as necessary; it would confuse the
+		     special logic we apply to malloc/free pair removal.  */
+		  && (!(call = gimple_call_fndecl (stmt))
+		      || DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL
+		      || (DECL_FUNCTION_CODE (call) != BUILT_IN_MALLOC
+			  && DECL_FUNCTION_CODE (call) != BUILT_IN_CALLOC
+			  && DECL_FUNCTION_CODE (call) != BUILT_IN_ALLOCA
+			  && (DECL_FUNCTION_CODE (call)
			      != BUILT_IN_ALLOCA_WITH_ALIGN))))
		{
-		  tree name;
-
-		  /* When LHS of var = call (); is dead, simplify it into
-		     call (); saving one operand.  */
-		  name = gimple_call_lhs (stmt);
-		  if (name && TREE_CODE (name) == SSA_NAME
-		      && !TEST_BIT (processed, SSA_NAME_VERSION (name)))
+		  something_changed = true;
+		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
-		      something_changed = true;
-		      if (dump_file && (dump_flags & TDF_DETAILS))
-			{
-			  fprintf (dump_file, "Deleting LHS of call: ");
-			  print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
-			  fprintf (dump_file, "\n");
-			}
-
-		      gimple_call_set_lhs (stmt, NULL_TREE);
-		      maybe_clean_or_replace_eh_stmt (stmt, stmt);
-		      update_stmt (stmt);
-		      release_ssa_name (name);
+		      fprintf (dump_file, "Deleting LHS of call: ");
+		      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
+		      fprintf (dump_file, "\n");
		    }
-		  notice_special_calls (stmt);
+
+		  gimple_call_set_lhs (stmt, NULL_TREE);
+		  maybe_clean_or_replace_eh_stmt (stmt, stmt);
+		  update_stmt (stmt);
+		  release_ssa_name (name);
		}
-	      gsi_next (&gsi);
-	    }
-	  else
-	    {
-	      gsi_next (&gsi);
	    }
	}
    }
+
+  VEC_free (basic_block, heap, h);
+
   /* Since we don't track liveness of virtual PHI nodes, it is possible that we
      rendered some PHI nodes unreachable while they are still in use.
      Mark them for renaming.  */
   if (cfg_altered)
     {
-      basic_block next_bb;
+      basic_block prev_bb;
+
       find_unreachable_blocks ();
-      for (bb = ENTRY_BLOCK_PTR->next_bb; bb != EXIT_BLOCK_PTR; bb = next_bb)
+
+      /* Delete all unreachable basic blocks in reverse dominator order.  */
+      for (bb = EXIT_BLOCK_PTR->prev_bb; bb != ENTRY_BLOCK_PTR; bb = prev_bb)
	{
-	  next_bb = bb->next_bb;
-	  if (!(bb->flags & BB_REACHABLE))
+	  prev_bb = bb->prev_bb;
+
+	  if (!TEST_BIT (bb_contains_live_stmts, bb->index)
+	      || !(bb->flags & BB_REACHABLE))
	    {
	      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
		if (!is_gimple_reg (gimple_phi_result (gsi_stmt (gsi))))
@@ -1159,7 +1405,36 @@ eliminate_unnecessary_stmts (void)
		    if (found)
		      mark_virtual_phi_result_for_renaming (gsi_stmt (gsi));
		  }
-	      delete_basic_block (bb);
+
+	      if (!(bb->flags & BB_REACHABLE))
+		{
+		  /* Speed up the removal of blocks that don't
+		     dominate others.  Walking backwards, this should
+		     be the common case.  ??? Do we need to recompute
+		     dominators because of cfg_altered?  */
+		  if (!MAY_HAVE_DEBUG_STMTS
+		      || !first_dom_son (CDI_DOMINATORS, bb))
+		    delete_basic_block (bb);
+		  else
+		    {
+		      h = get_all_dominated_blocks (CDI_DOMINATORS, bb);
+
+		      while (VEC_length (basic_block, h))
+			{
+			  bb = VEC_pop (basic_block, h);
+			  prev_bb = bb->prev_bb;
+			  /* Rearrangements to the CFG may have failed
+			     to update the dominators tree, so that
+			     formerly-dominated blocks are now
+			     otherwise reachable.  */
+			  if (!!(bb->flags & BB_REACHABLE))
+			    continue;
+			  delete_basic_block (bb);
+			}
+
+		      VEC_free (basic_block, heap, h);
+		    }
+		}
	    }
	}
    }
@@ -1265,6 +1540,8 @@ perform_tree_ssa_dce (bool aggressive)
   struct edge_list *el = NULL;
   bool something_changed = 0;
 
+  calculate_dominance_info (CDI_DOMINATORS);
+
   /* Preheaders are needed for SCEV to work.
     Simple latches and recorded exits improve the chances that the loop
     will be proved to be finite in testcases such as loop-15.c and
     loop-24.c.  */
@@ -1296,7 +1573,9 @@ perform_tree_ssa_dce (bool aggressive)
 
   longest_chain = 0;
   total_chain = 0;
+  nr_walks = 0;
   chain_ovfl = false;
+  visited = BITMAP_ALLOC (NULL);
   propagate_necessity (el);
   BITMAP_FREE (visited);
 
@@ -1324,7 +1603,7 @@ perform_tree_ssa_dce (bool aggressive)
     free_edge_list (el);
 
   if (something_changed)
-    return (TODO_update_ssa | TODO_cleanup_cfg | TODO_ggc_collect
+    return (TODO_update_ssa | TODO_cleanup_cfg | TODO_ggc_collect
	    | TODO_remove_unused_locals);
   else
     return 0;
@@ -1377,7 +1656,7 @@ struct gimple_opt_pass pass_dce =
   0,					/* properties_provided */
   0,					/* properties_destroyed */
   0,					/* todo_flags_start */
-  TODO_dump_func | TODO_verify_ssa	/* todo_flags_finish */
+  TODO_verify_ssa			/* todo_flags_finish */
 }
};
 
@@ -1396,7 +1675,7 @@ struct gimple_opt_pass pass_dce_loop =
   0,					/* properties_provided */
   0,					/* properties_destroyed */
   0,					/* todo_flags_start */
-  TODO_dump_func | TODO_verify_ssa	/* todo_flags_finish */
+  TODO_verify_ssa			/* todo_flags_finish */
 }
};
 
@@ -1415,7 +1694,7 @@ struct gimple_opt_pass pass_cd_dce =
   0,					/* properties_provided */
   0,					/* properties_destroyed */
   0,					/* todo_flags_start */
-  TODO_dump_func | TODO_verify_ssa
+  TODO_verify_ssa | TODO_verify_flow
					/* todo_flags_finish */
 }
};
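
[Editor's note: to watch this pass work on a testcase, the dump for
pass_cd_dce can be requested by its pass name "cddce"; e.g., with a
compiler containing this patch:

    gcc -O2 -fdump-tree-cddce-details test.c

The resulting test.c.*.cddce dump file should show the "Processing
worklist:" trace and each statement the pass deletes.]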