+ /* Canonicalize an LROTATE_EXPR by a constant count into an
+ RROTATE_EXPR by (bitsize - count), so the folds below need only
+ handle right rotates. The tree node T is updated in place. */
+ if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
+ {
+ TREE_SET_CODE (t, RROTATE_EXPR);
+ code = RROTATE_EXPR;
+ TREE_OPERAND (t, 1) = arg1
+ = const_binop
+ (MINUS_EXPR,
+ convert (TREE_TYPE (arg1),
+ build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
+ arg1, 0);
+ /* If the original count exceeded the width of the mode, the new
+ count is negative; return the converted node as-is rather than
+ let the transformations below see a negative rotate count. */
+ if (tree_int_cst_sgn (arg1) < 0)
+ return t;
+ }
+
+ /* If we have a rotate of a bit operation with the rotate count and
+ the second operand of the bit operation both constant, permute the
+ two operations: (A op C1) rotate C2 -> (A rotate C2) op (C1 rotate C2).
+ A rotate is a pure bit permutation, so it commutes with bitwise
+ operations, and the inner fold of (C1 rotate C2) collapses to a
+ constant, leaving a single rotate on the variable operand. */
+ if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
+ && (TREE_CODE (arg0) == BIT_AND_EXPR
+ || TREE_CODE (arg0) == BIT_ANDTC_EXPR
+ || TREE_CODE (arg0) == BIT_IOR_EXPR
+ || TREE_CODE (arg0) == BIT_XOR_EXPR)
+ && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
+ return fold (build (TREE_CODE (arg0), type,
+ fold (build (code, type,
+ TREE_OPERAND (arg0, 0), arg1)),
+ fold (build (code, type,
+ TREE_OPERAND (arg0, 1), arg1))));
+
+ /* Two consecutive rotates whose counts add up to the width of the
+ mode cancel out entirely. Only the low words of the two counts
+ are summed; the TREE_INT_CST_HIGH == 0 checks ensure the high
+ words contribute nothing, so the comparison is exact. */
+ if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
+ && TREE_CODE (arg0) == RROTATE_EXPR
+ && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
+ && TREE_INT_CST_HIGH (arg1) == 0
+ && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
+ && ((TREE_INT_CST_LOW (arg1)
+ + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
+ == GET_MODE_BITSIZE (TYPE_MODE (type))))
+ return TREE_OPERAND (arg0, 0);
+