OSDN Git Service

2008-09-30 Paolo Bonzini <bonzini@gnu.org>
author: bonzini <bonzini@138bc75d-0d04-0410-961f-82ee72b054a4>
Wed, 1 Oct 2008 12:22:17 +0000 (12:22 +0000)
committer: bonzini <bonzini@138bc75d-0d04-0410-961f-82ee72b054a4>
Wed, 1 Oct 2008 12:22:17 +0000 (12:22 +0000)
PR tree-optimization/37662
* tree-ssa-ccp.c (fold_gimple_assign): Invert the operands of a
commutative binary operation if they are in the wrong order and
fold_build2 produces non-GIMPLE.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@140809 138bc75d-0d04-0410-961f-82ee72b054a4

gcc/ChangeLog
gcc/tree-ssa-ccp.c

index ba26097..b6f1601 100644 (file)
@@ -1,3 +1,10 @@
+2008-09-30  Paolo Bonzini  <bonzini@gnu.org>
+
+       PR tree-optimization/37662
+       * tree-ssa-ccp.c (fold_gimple_assign): Invert the operands of a
+       commutative binary operation if they are in the wrong order and
+       fold_build2 produces non-GIMPLE.
+
 2008-09-30  Jakub Jelinek  <jakub@redhat.com>
 
        PR tree-optimization/37662
index 22626a5..383367e 100644 (file)
@@ -2711,6 +2711,17 @@ fold_gimple_assign (gimple_stmt_iterator *si)
           STRIP_USELESS_TYPE_CONVERSION (result);
           if (valid_gimple_rhs_p (result))
            return result;
+
+         /* Fold might have produced non-GIMPLE, so if we trust it blindly
+            we lose canonicalization opportunities.  Do not go again
+            through fold here though, or the same non-GIMPLE will be
+            produced.  */
+          if (commutative_tree_code (subcode)
+              && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
+                                       gimple_assign_rhs2 (stmt), false))
+            return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
+                           gimple_assign_rhs2 (stmt),
+                           gimple_assign_rhs1 (stmt));
         }
       break;