X-Git-Url: http://git.sourceforge.jp/view?p=pf3gnuchains%2Fgcc-fork.git;a=blobdiff_plain;f=gcc%2Ftree-ssa-reassoc.c;h=8d1c05c86e1eb20b58855cbbee7a9c2642cb2db3;hp=be68331faf2e40ea5f9bfa2400e22fc2cb2026b0;hb=dccaf904df39815109ff6bfd8cd8c5da2b23acf2;hpb=dddf503682e4d834e9a8cbbd280a0e60586b5b98

diff --git a/gcc/tree-ssa-reassoc.c b/gcc/tree-ssa-reassoc.c
index be68331faf2..8d1c05c86e1 100644
--- a/gcc/tree-ssa-reassoc.c
+++ b/gcc/tree-ssa-reassoc.c
@@ -1,5 +1,5 @@
 /* Reassociation for trees.
-   Copyright (C) 2005, 2007 Free Software Foundation, Inc.
+   Copyright (C) 2005, 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
    Contributed by Daniel Berlin <dberlin@dberlin.org>
 
 This file is part of GCC.
@@ -22,7 +22,6 @@ along with GCC; see the file COPYING3.  If not see
 #include "system.h"
 #include "coretypes.h"
 #include "tm.h"
-#include "errors.h"
 #include "ggc.h"
 #include "tree.h"
 #include "basic-block.h"
@@ -33,6 +32,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "tree-dump.h"
 #include "timevar.h"
 #include "tree-iterator.h"
+#include "real.h"
 #include "tree-pass.h"
 #include "alloc-pool.h"
 #include "vec.h"
@@ -107,34 +107,34 @@ along with GCC; see the file COPYING3.  If not see
 
   mergetmp2 = d + e
 
   and put mergetmp2 on the merge worklist.
-  
+
   so merge worklist = {mergetmp, c, mergetmp2}
-  
+
   Continue building binary ops of these operations until you have
   only one operation left on the worklist.
-  
+
   So we have
-  
+
   build binary op
   mergetmp3 = mergetmp + c
-  
+
   worklist = {mergetmp2, mergetmp3}
-  
+
   mergetmp4 = mergetmp2 + mergetmp3
-  
+
   worklist = {mergetmp4}
-  
+
   because we have one operation left, we can now just set the original
   statement equal to the result of that operation.
-  
+
   This will at least expose a + b and d + e to redundancy elimination
   as binary operations.
-  
+
   For extra points, you can reuse the old statements to build the
   mergetmps, since you shouldn't run out.
 
   So why don't we do this?
-  
+
   Because it's expensive, and rarely will help.  Most trees we are
   reassociating have 3 or less ops.  If they have 2 ops, they already
   will be written into a nice single binary op.  If you have 3 ops, a
@@ -143,18 +143,18 @@ along with GCC; see the file COPYING3.  If not see
 
   mergetmp = op1 + op2
   newstmt = mergetmp + op3
-  
+
   instead of
   mergetmp = op2 + op3
   newstmt = mergetmp + op1
-  
+
   If all three are of the same rank, you can't expose them all in a
   single binary operator anyway, so the above is *still* the best you
   can do.
-  
+
   Thus, this is what we do.  When we have three ops left, we check to see
   what order to put them in, and call it a day.  As a nod to vector sum
-  reduction, we check if any of ops are a really a phi node that is a
+  reduction, we check if any of the ops are really a phi node that is a
   destructive update for the associating op, and keep the destructive
   update together for vector sum reduction recognition.  */
@@ -172,11 +172,15 @@ static struct
 typedef struct operand_entry
 {
   unsigned int rank;
+  int id;
   tree op;
 } *operand_entry_t;
 
 static alloc_pool operand_entry_pool;
 
+/* This is used to assign a unique ID to each struct operand_entry
+   so that qsort results are identical on different hosts.  */
+static int next_operand_entry_id;
 
 /* Starting rank number for a given basic block, so that we can rank
    operations using unmovable instructions in that BB based on the bb
@@ -193,7 +197,7 @@ static inline long
 find_operand_rank (tree e)
 {
   void **slot = pointer_map_contains (operand_rank, e);
-  return slot ? (long) *slot : -1;
+  return slot ? (long) (intptr_t) *slot : -1;
 }
 
 /* Insert {E,RANK} into the operand rank hashtable.  */
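
The hunk above (and the matching one in insert_operand_rank just below) replaces a bare (long) cast with a round trip through intptr_t when an integer rank is stashed in a pointer-map slot. Converting between an integer and a pointer is only safe when the integer type is at least pointer-sized, which long is not on every host (LLP64 systems, for instance); intptr_t is guaranteed wide enough. A minimal stand-alone sketch of the idiom, using hypothetical names rather than GCC's pointer-map API:

    #include <assert.h>
    #include <stdint.h>

    /* Store the integer RANK in a void* slot, the way insert_operand_rank
       stuffs a rank into a pointer_map slot.  */
    static void
    set_slot (void **slot, long rank)
    {
      *slot = (void *) (intptr_t) rank;
    }

    /* Read it back; a null SLOT yields -1, as in find_operand_rank.  */
    static long
    get_slot (void **slot)
    {
      return slot ? (long) (intptr_t) *slot : -1;
    }

    int
    main (void)
    {
      void *slot = 0;
      set_slot (&slot, 42);
      assert (get_slot (&slot) == 42);
      assert (get_slot (0) == -1);
      return 0;
    }
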
@@ -205,7 +209,7 @@ insert_operand_rank (tree e, long rank)
   gcc_assert (rank > 0);
   slot = pointer_map_insert (operand_rank, e);
   gcc_assert (!*slot);
-  *slot = (void *) rank;
+  *slot = (void *) (intptr_t) rank;
 }
 
 /* Given an expression E, return the rank of the expression.  */
@@ -243,7 +247,7 @@ get_rank (tree e)
 	return 0;
 
       if (!is_gimple_assign (stmt)
-	  || !ZERO_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_DEFS))
+	  || gimple_vdef (stmt))
 	return bb_rank[gimple_bb (stmt)->index];
 
       /* If we already have a rank for this expression, use that.  */
@@ -328,16 +332,31 @@ sort_by_operand_rank (const void *pa, const void *pb)
      to fold when added/multiplied//whatever are put next to each
      other.  Since all constants have rank 0, order them by type.  */
   if (oeb->rank == 0 && oea->rank == 0)
-    return constant_type (oeb->op) - constant_type (oea->op);
+    {
+      if (constant_type (oeb->op) != constant_type (oea->op))
+	return constant_type (oeb->op) - constant_type (oea->op);
+      else
+	/* To make sorting result stable, we use unique IDs to determine
+	   order.  */
+	return oeb->id - oea->id;
+    }
 
   /* Lastly, make sure the versions that are the same go next to each
      other.  We use SSA_NAME_VERSION because it's stable.  */
   if ((oeb->rank - oea->rank == 0)
       && TREE_CODE (oea->op) == SSA_NAME
       && TREE_CODE (oeb->op) == SSA_NAME)
-    return SSA_NAME_VERSION (oeb->op) - SSA_NAME_VERSION (oea->op);
+    {
+      if (SSA_NAME_VERSION (oeb->op) != SSA_NAME_VERSION (oea->op))
+	return SSA_NAME_VERSION (oeb->op) - SSA_NAME_VERSION (oea->op);
+      else
+	return oeb->id - oea->id;
+    }
 
-  return oeb->rank - oea->rank;
+  if (oeb->rank != oea->rank)
+    return oeb->rank - oea->rank;
+  else
+    return oeb->id - oea->id;
 }
 
 /* Add an operand entry to *OPS for the tree operand OP.  */
@@ -349,6 +368,7 @@ add_to_ops_vec (VEC(operand_entry_t, heap) **ops, tree op)
 
   oe->op = op;
   oe->rank = get_rank (op);
+  oe->id = next_operand_entry_id++;
   VEC_safe_push (operand_entry_t, heap, *ops, oe);
 }
@@ -448,7 +468,7 @@ eliminate_duplicate_pair (enum tree_code opcode,
 	    {
 	      VEC_free (operand_entry_t, heap, *ops);
 	      *ops = NULL;
-	      add_to_ops_vec (ops, fold_convert (TREE_TYPE (last->op), 
+	      add_to_ops_vec (ops, fold_convert (TREE_TYPE (last->op),
						 integer_zero_node));
 	      *all_done = true;
 	    }
@@ -467,6 +487,8 @@ eliminate_duplicate_pair (enum tree_code opcode,
   return false;
 }
 
+static VEC(tree, heap) *plus_negates;
+
 /* If OPCODE is PLUS_EXPR, CURR->OP is really a negate expression,
    look in OPS for a corresponding positive operation to cancel it
    out.  If we find one, remove the other from OPS, replace
@@ -512,7 +534,7 @@ eliminate_plus_minus_pair (enum tree_code opcode,
 	    }
 
 	  VEC_ordered_remove (operand_entry_t, *ops, i);
-	  add_to_ops_vec (ops, fold_convert(TREE_TYPE (oe->op), 
+	  add_to_ops_vec (ops, fold_convert(TREE_TYPE (oe->op),
					    integer_zero_node));
 	  VEC_ordered_remove (operand_entry_t, *ops, currindex);
 	  reassociate_stats.ops_eliminated ++;
@@ -521,6 +543,10 @@ eliminate_plus_minus_pair (enum tree_code opcode,
 	}
     }
 
+  /* CURR->OP is a negate expr in a plus expr: save it for later
+     inspection in repropagate_negates().  */
+  VEC_safe_push (tree, heap, plus_negates, curr->op);
+
   return false;
 }
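
All three comparison paths in sort_by_operand_rank now fall back to the new id field. The reason, per the comment introduced above, is that qsort is not a stable sort: when a comparator reports two elements equal, different host libraries may leave them in different orders, and the generated code would then differ from host to host. Tie-breaking on an insertion-order ID turns the comparison into a total order. A self-contained illustration of the technique, with a hypothetical struct standing in for operand_entry:

    #include <stdio.h>
    #include <stdlib.h>

    struct entry { int rank; int id; const char *name; };

    /* Sort by rank (higher first, as sort_by_operand_rank does), but fall
       back to the insertion-order ID so that equal-ranked entries keep one
       deterministic order on every host.  */
    static int
    entry_cmp (const void *pa, const void *pb)
    {
      const struct entry *a = (const struct entry *) pa;
      const struct entry *b = (const struct entry *) pb;
      if (b->rank != a->rank)
        return b->rank - a->rank;
      return b->id - a->id;
    }

    int
    main (void)
    {
      struct entry ops[] = { { 1, 0, "a" }, { 1, 1, "b" }, { 2, 2, "c" } };
      int i;
      qsort (ops, 3, sizeof (ops[0]), entry_cmp);
      for (i = 0; i < 3; i++)
        printf ("%s ", ops[i].name);      /* always prints: c b a */
      return 0;
    }
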
@@ -580,7 +606,7 @@ eliminate_not_pairs (enum tree_code opcode,
 	    oe->op = build_low_bits_mask (TREE_TYPE (oe->op),
 					  TYPE_PRECISION (TREE_TYPE (oe->op)));
 
-	  reassociate_stats.ops_eliminated 
+	  reassociate_stats.ops_eliminated
 	    += VEC_length (operand_entry_t, *ops) - 1;
 	  VEC_free (operand_entry_t, heap, *ops);
 	  *ops = NULL;
@@ -619,9 +645,9 @@ eliminate_using_constants (enum tree_code opcode,
 	      if (dump_file && (dump_flags & TDF_DETAILS))
 		fprintf (dump_file, "Found & 0, removing all other ops\n");
 
-	      reassociate_stats.ops_eliminated 
+	      reassociate_stats.ops_eliminated
 		+= VEC_length (operand_entry_t, *ops) - 1;
-	      
+
 	      VEC_free (operand_entry_t, heap, *ops);
 	      *ops = NULL;
 	      VEC_safe_push (operand_entry_t, heap, *ops, oelast);
@@ -647,15 +673,15 @@ eliminate_using_constants (enum tree_code opcode,
 	      if (dump_file && (dump_flags & TDF_DETAILS))
 		fprintf (dump_file, "Found | -1, removing all other ops\n");
 
-	      reassociate_stats.ops_eliminated 
+	      reassociate_stats.ops_eliminated
 		+= VEC_length (operand_entry_t, *ops) - 1;
-	      
+
 	      VEC_free (operand_entry_t, heap, *ops);
 	      *ops = NULL;
 	      VEC_safe_push (operand_entry_t, heap, *ops, oelast);
 	      return;
 	    }
-	}	
+	}
       else if (integer_zerop (oelast->op))
 	{
 	  if (VEC_length (operand_entry_t, *ops) != 1)
@@ -678,8 +704,8 @@ eliminate_using_constants (enum tree_code opcode,
 	    {
 	      if (dump_file && (dump_flags & TDF_DETAILS))
 		fprintf (dump_file, "Found * 0, removing all other ops\n");
-	      
-	      reassociate_stats.ops_eliminated 
+
+	      reassociate_stats.ops_eliminated
 		+= VEC_length (operand_entry_t, *ops) - 1;
 	      VEC_free (operand_entry_t, heap, *ops);
 	      *ops = NULL;
@@ -734,6 +760,7 @@ static void linearize_expr_tree (VEC(operand_entry_t, heap) **, gimple,
 /* Structure for tracking and counting operands.  */
 typedef struct oecount_s {
   int cnt;
+  int id;
   enum tree_code oecode;
   tree op;
 } oecount;
@@ -771,7 +798,11 @@ oecount_cmp (const void *p1, const void *p2)
 {
   const oecount *c1 = (const oecount *)p1;
   const oecount *c2 = (const oecount *)p2;
-  return c1->cnt - c2->cnt;
+  if (c1->cnt != c2->cnt)
+    return c1->cnt - c2->cnt;
+  else
+    /* If counts are identical, use unique IDs to stabilize qsort.  */
+    return c1->id - c2->id;
 }
 
 /* Walks the linear chain with result *DEF searching for an operation
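
The "Found & 0", "Found | -1", and "Found * 0" branches in eliminate_using_constants rely on the absorbing elements of the integer operations: ANDing with 0 or multiplying by 0 yields 0, and ORing with -1 (all bits set) yields -1, so every other operand in the list can be dropped. A quick sanity check of those identities in plain C:

    #include <assert.h>

    int
    main (void)
    {
      int x = 0x1234;
      assert ((x & 0) == 0);      /* "Found & 0": AND absorbs to zero.      */
      assert ((x | -1) == -1);    /* "Found | -1": OR absorbs to all-ones.  */
      assert (x * 0 == 0);        /* "Found * 0": MUL absorbs to zero.      */
      return 0;
    }
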
@@ -845,7 +876,7 @@ build_and_add_sum (tree tmpvar, tree op1, tree op2, enum tree_code opcode)
   if ((!op1def || gimple_nop_p (op1def))
       && (!op2def || gimple_nop_p (op2def)))
     {
-      gsi = gsi_start_bb (single_succ (ENTRY_BLOCK_PTR));
+      gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
       gsi_insert_before (&gsi, sum, GSI_NEW_STMT);
     }
   else if ((!op1def || gimple_nop_p (op1def))
@@ -854,26 +885,50 @@ build_and_add_sum (tree tmpvar, tree op1, tree op2, enum tree_code opcode)
     {
       if (gimple_code (op2def) == GIMPLE_PHI)
 	{
-	  gsi = gsi_start_bb (gimple_bb (op2def));
+	  gsi = gsi_after_labels (gimple_bb (op2def));
 	  gsi_insert_before (&gsi, sum, GSI_NEW_STMT);
 	}
       else
 	{
-	  gsi = gsi_for_stmt (op2def);
-	  gsi_insert_after (&gsi, sum, GSI_NEW_STMT);
+	  if (!stmt_ends_bb_p (op2def))
+	    {
+	      gsi = gsi_for_stmt (op2def);
+	      gsi_insert_after (&gsi, sum, GSI_NEW_STMT);
+	    }
+	  else
+	    {
+	      edge e;
+	      edge_iterator ei;
+
+	      FOR_EACH_EDGE (e, ei, gimple_bb (op2def)->succs)
+		if (e->flags & EDGE_FALLTHRU)
+		  gsi_insert_on_edge_immediate (e, sum);
+	    }
 	}
     }
   else
     {
       if (gimple_code (op1def) == GIMPLE_PHI)
 	{
-	  gsi = gsi_start_bb (gimple_bb (op1def));
+	  gsi = gsi_after_labels (gimple_bb (op1def));
 	  gsi_insert_before (&gsi, sum, GSI_NEW_STMT);
 	}
       else
 	{
-	  gsi = gsi_for_stmt (op1def);
-	  gsi_insert_after (&gsi, sum, GSI_NEW_STMT);
+	  if (!stmt_ends_bb_p (op1def))
+	    {
+	      gsi = gsi_for_stmt (op1def);
+	      gsi_insert_after (&gsi, sum, GSI_NEW_STMT);
+	    }
+	  else
+	    {
+	      edge e;
+	      edge_iterator ei;
+
+	      FOR_EACH_EDGE (e, ei, gimple_bb (op1def)->succs)
+		if (e->flags & EDGE_FALLTHRU)
+		  gsi_insert_on_edge_immediate (e, sum);
+	    }
 	}
     }
   update_stmt (sum);
@@ -929,6 +984,7 @@ undistribute_ops_list (enum tree_code opcode,
   VEC (operand_entry_t, heap) **subops;
   htab_t ctable;
   bool changed = false;
+  int next_oecount_id = 0;
 
   if (length <= 1
       || opcode != PLUS_EXPR)
@@ -996,6 +1052,7 @@ undistribute_ops_list (enum tree_code opcode,
 	  size_t idx;
 	  c.oecode = oecode;
 	  c.cnt = 1;
+	  c.id = next_oecount_id++;
 	  c.op = oe1->op;
 	  VEC_safe_push (oecount, heap, cvec, &c);
 	  idx = VEC_length (oecount, cvec) + 41;
@@ -1242,13 +1299,37 @@ is_phi_for_stmt (gimple stmt, tree operand)
   return false;
 }
 
+/* Remove def stmt of VAR if VAR has zero uses and recurse
+   on rhs1 operand if so.  */
+
+static void
+remove_visited_stmt_chain (tree var)
+{
+  gimple stmt;
+  gimple_stmt_iterator gsi;
+
+  while (1)
+    {
+      if (TREE_CODE (var) != SSA_NAME || !has_zero_uses (var))
+	return;
+      stmt = SSA_NAME_DEF_STMT (var);
+      if (!is_gimple_assign (stmt)
+	  || !gimple_visited_p (stmt))
+	return;
+      var = gimple_assign_rhs1 (stmt);
+      gsi = gsi_for_stmt (stmt);
+      gsi_remove (&gsi, true);
+      release_defs (stmt);
+    }
+}
+
 /* Recursively rewrite our linearized statements so that the operators
    match those in OPS[OPINDEX], putting the computation in rank
    order.  */
 
 static void
 rewrite_expr_tree (gimple stmt, unsigned int opindex,
-		   VEC(operand_entry_t, heap) * ops)
+		   VEC(operand_entry_t, heap) * ops, bool moved)
 {
   tree rhs1 = gimple_assign_rhs1 (stmt);
   tree rhs2 = gimple_assign_rhs2 (stmt);
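
The new remove_visited_stmt_chain walks backwards through rhs1 definitions, deleting every already-visited statement whose result ended up with zero uses after the rewrite. The shape of that loop, reduced to a toy def chain with hypothetical types (the real code operates on GIMPLE statements and SSA names):

    #include <stdio.h>

    /* Toy stand-in for an SSA def: a value computed from PREV (rhs1's
       defining node) with a use count, like has_zero_uses () queries.  */
    struct def
    {
      const char *name;
      struct def *prev;
      int uses;
    };

    /* Walk from VAR towards the leaves, removing defs that have become
       dead -- the structure of remove_visited_stmt_chain above.  */
    static void
    remove_dead_chain (struct def *var)
    {
      while (var && var->uses == 0)
        {
          printf ("removing dead def %s\n", var->name);
          /* The real pass unlinks the statement (gsi_remove) and
             releases its defs here.  */
          var = var->prev;
        }
    }

    int
    main (void)
    {
      struct def a = { "a_1", 0, 1 };   /* still used elsewhere       */
      struct def b = { "b_2", &a, 0 };  /* dead after the rewrite     */
      struct def c = { "c_3", &b, 0 };  /* dead after the rewrite     */
      remove_dead_chain (&c);           /* removes c_3, b_2; keeps a_1 */
      return 0;
    }
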
@@ -1325,6 +1406,8 @@ rewrite_expr_tree (gimple stmt, unsigned int opindex,
 	  gimple_assign_set_rhs1 (stmt, oe1->op);
 	  gimple_assign_set_rhs2 (stmt, oe2->op);
 	  update_stmt (stmt);
+	  if (rhs1 != oe1->op && rhs1 != oe2->op)
+	    remove_visited_stmt_chain (rhs1);
 
 	  if (dump_file && (dump_flags & TDF_DETAILS))
 	    {
@@ -1344,6 +1427,24 @@ rewrite_expr_tree (gimple stmt, unsigned int opindex,
 
   if (oe->op != rhs2)
     {
+      if (!moved)
+	{
+	  gimple_stmt_iterator gsinow, gsirhs1;
+	  gimple stmt1 = stmt, stmt2;
+	  unsigned int count;
+
+	  gsinow = gsi_for_stmt (stmt);
+	  count = VEC_length (operand_entry_t, ops) - opindex - 2;
+	  while (count-- != 0)
+	    {
+	      stmt2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt1));
+	      gsirhs1 = gsi_for_stmt (stmt2);
+	      gsi_move_before (&gsirhs1, &gsinow);
+	      gsi_prev (&gsinow);
+	      stmt1 = stmt2;
+	    }
+	  moved = true;
+	}
+
       if (dump_file && (dump_flags & TDF_DETAILS))
 	{
@@ -1362,7 +1463,7 @@ rewrite_expr_tree (gimple stmt, unsigned int opindex,
     }
   /* Recurse on the LHS of the binary operator, which is guaranteed to
      be the non-leaf side.  */
-  rewrite_expr_tree (SSA_NAME_DEF_STMT (rhs1), opindex + 1, ops);
+  rewrite_expr_tree (SSA_NAME_DEF_STMT (rhs1), opindex + 1, ops, moved);
 }
 
 /* Transform STMT, which is really (A +B) + (C + D) into the left
@@ -1432,8 +1533,6 @@ get_single_immediate_use (tree lhs)
   return NULL;
 }
 
-static VEC(tree, heap) *broken_up_subtracts;
-
 /* Recursively negate the value of TONEGATE, and return the SSA_NAME
    representing the negated value.  Insertions of any necessary
    instructions go before GSI.
@@ -1476,7 +1575,6 @@ negate_value (tree tonegate, gimple_stmt_iterator *gsi)
   tonegate = fold_build1 (NEGATE_EXPR, TREE_TYPE (tonegate), tonegate);
   resultofnegate = force_gimple_operand_gsi (gsi, tonegate, true,
 					     NULL_TREE, true, GSI_SAME_STMT);
-  VEC_safe_push (tree, heap, broken_up_subtracts, resultofnegate);
   return resultofnegate;
 }
@@ -1538,7 +1636,6 @@ static void
 linearize_expr_tree (VEC(operand_entry_t, heap) **ops, gimple stmt,
 		     bool is_associative, bool set_visited)
 {
-  gimple_stmt_iterator gsinow, gsilhs;
   tree binlhs = gimple_assign_rhs1 (stmt);
   tree binrhs = gimple_assign_rhs2 (stmt);
   gimple binlhsdef, binrhsdef;
@@ -1619,9 +1716,6 @@ linearize_expr_tree (VEC(operand_entry_t, heap) **ops, gimple stmt,
   gcc_assert (TREE_CODE (binrhs) != SSA_NAME
 	      || !is_reassociable_op (SSA_NAME_DEF_STMT (binrhs),
 				      rhscode, loop));
-  gsinow = gsi_for_stmt (stmt);
-  gsilhs = gsi_for_stmt (SSA_NAME_DEF_STMT (binlhs));
-  gsi_move_before (&gsilhs, &gsinow);
   linearize_expr_tree (ops, SSA_NAME_DEF_STMT (binlhs),
 		       is_associative, set_visited);
   add_to_ops_vec (ops, binrhs);
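
rewrite_expr_tree re-emits the linearized operand list in rank order, which clusters constants (rank 0) together so later folding can combine them into a single literal. The payoff is easy to see with integer arithmetic, where reassociation is always exact:

    #include <assert.h>

    int
    main (void)
    {
      int a = 7, b = 9;
      int as_written   = (a + 1) + (b + 2);  /* two adds, each feeding a constant */
      int reassociated = (a + b) + 3;        /* constants paired and folded       */
      assert (as_written == reassociated);
      return 0;
    }
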
@@ -1636,18 +1730,19 @@ repropagate_negates (void)
   unsigned int i = 0;
   tree negate;
 
-  for (i = 0; VEC_iterate (tree, broken_up_subtracts, i, negate); i++)
+  for (i = 0; VEC_iterate (tree, plus_negates, i, negate); i++)
     {
       gimple user = get_single_immediate_use (negate);
 
+      if (!user || !is_gimple_assign (user))
+	continue;
+
       /* The negate operand can be either operand of a PLUS_EXPR
 	 (it can be the LHS if the RHS is a constant for example).
 
 	 Force the negate operand to the RHS of the PLUS_EXPR, then
 	 transform the PLUS_EXPR into a MINUS_EXPR.  */
-      if (user
-	  && is_gimple_assign (user)
-	  && gimple_assign_rhs_code (user) == PLUS_EXPR)
+      if (gimple_assign_rhs_code (user) == PLUS_EXPR)
 	{
 	  /* If the negated operand appears on the LHS of the
 	     PLUS_EXPR, exchange the operands of the PLUS_EXPR
@@ -1670,21 +1765,75 @@ repropagate_negates (void)
 	      update_stmt (user);
 	    }
 	}
+      else if (gimple_assign_rhs_code (user) == MINUS_EXPR)
+	{
+	  if (gimple_assign_rhs1 (user) == negate)
+	    {
+	      /* We have
+		   x = -a
+		   y = x - b
+		 which we transform into
+		   x = a + b
+		   y = -x .
+		 This pushes down the negate which we possibly can merge
+		 into some other operation, hence insert it into the
+		 plus_negates vector.  */
+	      gimple feed = SSA_NAME_DEF_STMT (negate);
+	      tree a = gimple_assign_rhs1 (feed);
+	      tree rhs2 = gimple_assign_rhs2 (user);
+	      gimple_stmt_iterator gsi = gsi_for_stmt (feed), gsi2;
+	      gimple_replace_lhs (feed, negate);
+	      gimple_assign_set_rhs_with_ops (&gsi, PLUS_EXPR, a, rhs2);
+	      update_stmt (gsi_stmt (gsi));
+	      gsi2 = gsi_for_stmt (user);
+	      gimple_assign_set_rhs_with_ops (&gsi2, NEGATE_EXPR, negate, NULL);
+	      update_stmt (gsi_stmt (gsi2));
+	      gsi_move_before (&gsi, &gsi2);
+	      VEC_safe_push (tree, heap, plus_negates,
+			     gimple_assign_lhs (gsi_stmt (gsi2)));
+	    }
+	  else
+	    {
+	      /* Transform "x = -a; y = b - x" into "y = b + a", getting
+		 rid of one operation.  */
+	      gimple feed = SSA_NAME_DEF_STMT (negate);
+	      tree a = gimple_assign_rhs1 (feed);
+	      tree rhs1 = gimple_assign_rhs1 (user);
+	      gimple_stmt_iterator gsi = gsi_for_stmt (user);
+	      gimple_assign_set_rhs_with_ops (&gsi, PLUS_EXPR, rhs1, a);
+	      update_stmt (gsi_stmt (gsi));
+	    }
+	}
     }
 }
 
+/* Returns true if OP is of a type for which we can do reassociation.
+   That is for integral or non-saturating fixed-point types, and for
+   floating point type when associative-math is enabled.  */
+
+static bool
+can_reassociate_p (tree op)
+{
+  tree type = TREE_TYPE (op);
+  if (INTEGRAL_TYPE_P (type)
+      || NON_SAT_FIXED_POINT_TYPE_P (type)
+      || (flag_associative_math && SCALAR_FLOAT_TYPE_P (type)))
+    return true;
+  return false;
+}
+
 /* Break up subtract operations in block BB.
 
    We do this top down because we don't know whether the subtract is
    part of a possible chain of reassociation except at the top.
-   
+
   IE given
   d = f + g
   c = a + e
   b = c - d
   q = b - r
   k = t - q
-   
+
   we want to break up k = t - q, but we won't until we've transformed q
   = b - r, which won't be broken up until we transform b = c - d.
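
The two new MINUS_EXPR cases rest on elementary sign identities: -a - b = -(a + b), which moves the negate below the subtraction (and is why the result is pushed back onto plus_negates for another round), and b - (-a) = b + a, which removes the negate outright. A sanity check, exact for non-overflowing integers:

    #include <assert.h>

    int
    main (void)
    {
      int a = 5, b = 3;

      /* x = -a; y = x - b   becomes   x = a + b; y = -x.  */
      assert (-a - b == -(a + b));

      /* x = -a; y = b - x   becomes   y = b + a.  */
      assert (b - (-a) == b + a);

      return 0;
    }
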
@@ -1701,27 +1850,15 @@ break_up_subtract_bb (basic_block bb)
       gimple stmt = gsi_stmt (gsi);
       gimple_set_visited (stmt, false);
 
+      if (!is_gimple_assign (stmt)
+	  || !can_reassociate_p (gimple_assign_lhs (stmt)))
+	continue;
+
       /* Look for simple gimple subtract operations.  */
-      if (is_gimple_assign (stmt)
-	  && gimple_assign_rhs_code (stmt) == MINUS_EXPR)
+      if (gimple_assign_rhs_code (stmt) == MINUS_EXPR)
 	{
-	  tree lhs = gimple_assign_lhs (stmt);
-	  tree rhs1 = gimple_assign_rhs1 (stmt);
-	  tree rhs2 = gimple_assign_rhs2 (stmt);
-
-	  /* If associative-math we can do reassociation for
-	     non-integral types.  Or, we can do reassociation for
-	     non-saturating fixed-point types.  */
-	  if ((!INTEGRAL_TYPE_P (TREE_TYPE (lhs))
-	       || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
-	       || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2)))
-	      && (!SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs))
-		  || !SCALAR_FLOAT_TYPE_P (TREE_TYPE(rhs1))
-		  || !SCALAR_FLOAT_TYPE_P (TREE_TYPE(rhs2))
-		  || !flag_associative_math)
-	      && (!NON_SAT_FIXED_POINT_TYPE_P (TREE_TYPE (lhs))
-		  || !NON_SAT_FIXED_POINT_TYPE_P (TREE_TYPE(rhs1))
-		  || !NON_SAT_FIXED_POINT_TYPE_P (TREE_TYPE(rhs2))))
+	  if (!can_reassociate_p (gimple_assign_rhs1 (stmt))
+	      || !can_reassociate_p (gimple_assign_rhs2 (stmt)))
 	    continue;
 
 	  /* Check for a subtract used only in an addition.  If this
@@ -1731,6 +1868,9 @@ break_up_subtract_bb (basic_block bb)
 	  if (should_break_up_subtract (stmt))
 	    break_up_subtract (stmt, &gsi);
 	}
+      else if (gimple_assign_rhs_code (stmt) == NEGATE_EXPR
+	       && can_reassociate_p (gimple_assign_rhs1 (stmt)))
+	VEC_safe_push (tree, heap, plus_negates, gimple_assign_lhs (stmt));
     }
   for (son = first_dom_son (CDI_DOMINATORS, bb);
        son;
@@ -1771,6 +1911,18 @@ reassociate_bb (basic_block bb)
 	    {
 	      gsi_remove (&gsi, true);
 	      release_defs (stmt);
+	      /* We might end up removing the last stmt above which
+		 places the iterator to the end of the sequence.
+		 Reset it to the last stmt in this case which might
+		 be the end of the sequence as well if we removed
+		 the last statement of the sequence.  In which case
+		 we need to bail out.  */
+	      if (gsi_end_p (gsi))
+		{
+		  gsi = gsi_last_bb (bb);
+		  if (gsi_end_p (gsi))
+		    break;
+		}
 	    }
 	  continue;
 	}
@@ -1779,19 +1931,9 @@ reassociate_bb (basic_block bb)
 	  rhs1 = gimple_assign_rhs1 (stmt);
 	  rhs2 = gimple_assign_rhs2 (stmt);
 
-	  /* If associative-math we can do reassociation for
-	     non-integral types.  Or, we can do reassociation for
-	     non-saturating fixed-point types.  */
-	  if ((!INTEGRAL_TYPE_P (TREE_TYPE (lhs))
-	       || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
-	       || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2)))
-	      && (!SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs))
-		  || !SCALAR_FLOAT_TYPE_P (TREE_TYPE(rhs1))
-		  || !SCALAR_FLOAT_TYPE_P (TREE_TYPE(rhs2))
-		  || !flag_associative_math)
-	      && (!NON_SAT_FIXED_POINT_TYPE_P (TREE_TYPE (lhs))
-		  || !NON_SAT_FIXED_POINT_TYPE_P (TREE_TYPE(rhs1))
-		  || !NON_SAT_FIXED_POINT_TYPE_P (TREE_TYPE(rhs2))))
+	  if (!can_reassociate_p (lhs)
+	      || !can_reassociate_p (rhs1)
+	      || !can_reassociate_p (rhs2))
 	    continue;
 
 	  if (associative_tree_code (rhs_code))
@@ -1827,11 +1969,13 @@ reassociate_bb (basic_block bb)
 		      fprintf (dump_file, "Transforming ");
 		      print_gimple_stmt (dump_file, stmt, 0, 0);
 		    }
-		  
+
+		  rhs1 = gimple_assign_rhs1 (stmt);
 		  gimple_assign_set_rhs_from_tree (&gsi,
 						   VEC_last (operand_entry_t,
 							     ops)->op);
 		  update_stmt (stmt);
+		  remove_visited_stmt_chain (rhs1);
 
 		  if (dump_file && (dump_flags & TDF_DETAILS))
 		    {
@@ -1840,9 +1984,7 @@ reassociate_bb (basic_block bb)
 		    }
 		}
 	      else
-		{
-		  rewrite_expr_tree (stmt, 0, ops);
-		}
+		rewrite_expr_tree (stmt, 0, ops, false);
 
 	      VEC_free (operand_entry_t, heap, ops);
 	    }
@@ -1905,6 +2047,7 @@ init_reassoc (void)
   operand_entry_pool = create_alloc_pool ("operand entry pool",
 					  sizeof (struct operand_entry), 30);
+  next_operand_entry_id = 0;
 
   /* Reverse RPO (Reverse Post Order) will give us something where
      deeper loops come later.  */
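
can_reassociate_p admits floating-point operands only when flag_associative_math is set, because floating-point addition is not associative: reordering the operations can change the rounded result. A compact demonstration (compile without any fast-math options):

    #include <stdio.h>

    int
    main (void)
    {
      float big = 1e20f, small = 1.0f;
      float left  = (big + small) - big;   /* small is absorbed by big: 0 */
      float right = (big - big) + small;   /* reassociated: 1             */
      printf ("%g vs %g\n", left, right);  /* prints: 0 vs 1              */
      return 0;
    }
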
@@ -1938,7 +2081,7 @@ init_reassoc (void)
   free (bbs);
   calculate_dominance_info (CDI_POST_DOMINATORS);
 
-  broken_up_subtracts = NULL;
+  plus_negates = NULL;
 }
 
 /* Cleanup after the reassociation pass, and print stats if
@@ -1959,7 +2102,7 @@ fini_reassoc (void)
   pointer_map_destroy (operand_rank);
   free_alloc_pool (operand_entry_pool);
   free (bb_rank);
-  VEC_free (tree, heap, broken_up_subtracts);
+  VEC_free (tree, heap, plus_negates);
   free_dominance_info (CDI_POST_DOMINATORS);
   loop_optimizer_finalize ();
 }
@@ -1995,7 +2138,7 @@ struct gimple_opt_pass pass_reassoc =
   NULL,					/* next */
   0,					/* static_pass_number */
   TV_TREE_REASSOC,			/* tv_id */
-  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
+  PROP_cfg | PROP_ssa,			/* properties_required */
   0,					/* properties_provided */
   0,					/* properties_destroyed */
   0,					/* todo_flags_start */