+/* *GSI_P is a GIMPLE_CALL to a builtin function.
+ Optimize
+ memcpy (p, "abcd", 4);
+ memset (p + 4, ' ', 3);
+ into
+ memcpy (p, "abcd ", 7);
+ call if the latter can be stored by pieces during expansion. */
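+ The first statement can also be a mempcpy call, or a plain
+ character store that an earlier pass derived from a length 1
+ memcpy; both forms are handled below. */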
+
+static bool
+simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
+{
+ gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
+ tree vuse = gimple_vuse (stmt2);
+ if (vuse == NULL)
+ return false;
+ stmt1 = SSA_NAME_DEF_STMT (vuse);
+
+ switch (DECL_FUNCTION_CODE (callee2))
+ {
+ case BUILT_IN_MEMSET:
+ if (gimple_call_num_args (stmt2) != 3
+ || gimple_call_lhs (stmt2)
+ || CHAR_BIT != 8
+ || BITS_PER_UNIT != 8)
+ break;
+ else
+ {
+ tree callee1;
+ tree ptr1, src1, str1, off1, len1, lhs1;
+ tree ptr2 = gimple_call_arg (stmt2, 0);
+ tree val2 = gimple_call_arg (stmt2, 1);
+ tree len2 = gimple_call_arg (stmt2, 2);
+ tree diff, vdef, new_str_cst;
+ gimple use_stmt;
+ unsigned int ptr1_align;
+ unsigned HOST_WIDE_INT src_len;
+ char *src_buf;
+ use_operand_p use_p;
+
+ if (!host_integerp (val2, 0)
+ || !host_integerp (len2, 1))
+ break;
+ if (is_gimple_call (stmt1))
+ {
+ /* If the first stmt is a call, it needs to be memcpy
+ or mempcpy, with a string literal as the second argument and
+ a constant length. */
+ callee1 = gimple_call_fndecl (stmt1);
+ if (callee1 == NULL_TREE
+ || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
+ || gimple_call_num_args (stmt1) != 3)
+ break;
+ if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
+ && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
+ break;
+ ptr1 = gimple_call_arg (stmt1, 0);
+ src1 = gimple_call_arg (stmt1, 1);
+ len1 = gimple_call_arg (stmt1, 2);
+ lhs1 = gimple_call_lhs (stmt1);
+ if (!host_integerp (len1, 1))
+ break;
+ str1 = string_constant (src1, &off1);
+ if (str1 == NULL_TREE)
+ break;
+ if (!host_integerp (off1, 1)
+ || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
+ || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
+ - tree_low_cst (off1, 1)) > 0
+ || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
+ || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
+ != TYPE_MODE (char_type_node))
+ break;
+ }
+ else if (gimple_assign_single_p (stmt1))
+ {
+ /* Otherwise look for a length 1 memcpy optimized into
+ an assignment. */
+ ptr1 = gimple_assign_lhs (stmt1);
+ src1 = gimple_assign_rhs1 (stmt1);
+ if (TREE_CODE (ptr1) != MEM_REF
+ || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
+ || !host_integerp (src1, 0))
+ break;
+ ptr1 = build_fold_addr_expr (ptr1);
+ callee1 = NULL_TREE;
+ len1 = size_one_node;
+ lhs1 = NULL_TREE;
+ off1 = size_zero_node;
+ str1 = NULL_TREE;
+ }
+ else
+ break;
+
+ diff = constant_pointer_difference (ptr1, ptr2);
+ if (diff == NULL && lhs1 != NULL)
+ {
+ diff = constant_pointer_difference (lhs1, ptr2);
+ if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
+ && diff != NULL)
+ diff = size_binop (PLUS_EXPR, diff,
+ fold_convert (sizetype, len1));
+ }
+ /* If the difference between the second and first destination pointers
+ is not constant, or is bigger than the memcpy length, bail out. */
+ if (diff == NULL
+ || !host_integerp (diff, 1)
+ || tree_int_cst_lt (len1, diff))
+ break;
+
+ /* Use the maximum of the difference plus the memset length and the
+ memcpy length as the new memcpy length; if it is too big, bail out.
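+ E.g. for the memcpy (p, "abcd", 4) and memset (p + 4, ' ', 3)
+ pair from the overview comment, the difference is 4 and the
+ memset length is 3, so the merged copy is max (4 + 3, 4) = 7
+ bytes. */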
+ src_len = tree_low_cst (diff, 1);
+ src_len += tree_low_cst (len2, 1);
+ if (src_len < (unsigned HOST_WIDE_INT) tree_low_cst (len1, 1))
+ src_len = tree_low_cst (len1, 1);
+ if (src_len > 1024)
+ break;
+
+ /* If the mempcpy value is used elsewhere, bail out, as mempcpy
+ with a bigger length will return a different result
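+ (mempcpy returns DEST + LEN, so enlarging LEN changes the
+ returned pointer). */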
+ if (lhs1 != NULL_TREE
+ && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
+ && (TREE_CODE (lhs1) != SSA_NAME
+ || !single_imm_use (lhs1, &use_p, &use_stmt)
+ || use_stmt != stmt2))
+ break;
+
+ /* If anything reads memory in between the memcpy and memset
+ calls, the modified memcpy call might clobber it. */
+ vdef = gimple_vdef (stmt1);
+ if (vdef != NULL
+ && (!single_imm_use (vdef, &use_p, &use_stmt)
+ || use_stmt != stmt2))
+ break;
+
+ ptr1_align = get_pointer_alignment (ptr1);
+ /* Construct the new source string literal. */
+ src_buf = XALLOCAVEC (char, src_len + 1);
+ if (callee1)
+ memcpy (src_buf,
+ TREE_STRING_POINTER (str1) + tree_low_cst (off1, 1),
+ tree_low_cst (len1, 1));
+ else
+ src_buf[0] = tree_low_cst (src1, 0);
+ memset (src_buf + tree_low_cst (diff, 1),
+ tree_low_cst (val2, 1), tree_low_cst (len2, 1));
+ src_buf[src_len] = '\0';
+ /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
+ handles embedded '\0's. */
+ if (strlen (src_buf) != src_len)
+ break;
+ rtl_profile_for_bb (gimple_bb (stmt2));
+ /* If the new memcpy wouldn't be emitted by storing the literal
+ by pieces, this optimization might enlarge .rodata too much,
+ as commonly used string literals couldn't be shared any
+ longer. */
+ if (!can_store_by_pieces (src_len,
+ builtin_strncpy_read_str,
+ src_buf, ptr1_align, false))
+ break;
+
+ new_str_cst = build_string_literal (src_len, src_buf);
+ if (callee1)
+ {
+ /* If STMT1 is a mem{,p}cpy call, adjust it and remove the
+ memset call. */
+ if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
+ gimple_call_set_lhs (stmt1, NULL_TREE);
+ gimple_call_set_arg (stmt1, 1, new_str_cst);
+ gimple_call_set_arg (stmt1, 2,
+ build_int_cst (TREE_TYPE (len1), src_len));
+ update_stmt (stmt1);
+ unlink_stmt_vdef (stmt2);
+ gsi_remove (gsi_p, true);
+ release_defs (stmt2);
+ if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
+ release_ssa_name (lhs1);
+ return true;
+ }
+ else
+ {
+ /* Otherwise, if STMT1 is a length 1 memcpy optimized into
+ an assignment, remove STMT1 and change the memset call into
+ a memcpy call. */
+ gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);
+
+ if (!is_gimple_val (ptr1))
+ ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
+ true, GSI_SAME_STMT);
+ gimple_call_set_fndecl (stmt2,
+ builtin_decl_explicit (BUILT_IN_MEMCPY));
+ gimple_call_set_arg (stmt2, 0, ptr1);
+ gimple_call_set_arg (stmt2, 1, new_str_cst);
+ gimple_call_set_arg (stmt2, 2,
+ build_int_cst (TREE_TYPE (len2), src_len));
+ unlink_stmt_vdef (stmt1);
+ gsi_remove (&gsi, true);
+ release_defs (stmt1);
+ update_stmt (stmt2);
+ return false;
+ }
+ }
+ break;
+ default:
+ break;
+ }
+ return false;
+}
+
+/* Check if the expression NAME has a type of one-bit precision,
+ or is a known truth-valued expression.
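+ For example, the result of a comparison such as x < y stored in
+ an integer SSA name is truth-valued, while an arbitrary integer
+ SSA name is not. */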
+static bool
+truth_valued_ssa_name (tree name)
+{
+ gimple def;
+ tree type = TREE_TYPE (name);
+
+ if (!INTEGRAL_TYPE_P (type))
+ return false;
+ /* Don't check here for BOOLEAN_TYPE as the precision isn't
+ necessarily one and so ~X is not equal to !X. */
+ if (TYPE_PRECISION (type) == 1)
+ return true;
+ def = SSA_NAME_DEF_STMT (name);
+ if (is_gimple_assign (def))
+ return truth_value_p (gimple_assign_rhs_code (def));
+ return false;
+}
+
+/* Helper routine for the simplify_bitwise_binary_1 function.
+ Return for the SSA name NAME the expression X if it meets the
+ condition NAME = !X. Otherwise return NULL_TREE.
+ Detected patterns for NAME = !X are:
+ !X and X == 0 for X with integral type.
+ X ^ 1, X != 1, or ~X for X with integral type of precision one. */
+static tree
+lookup_logical_inverted_value (tree name)
+{
+ tree op1, op2;
+ enum tree_code code;
+ gimple def;
+
+ /* If NAME has a non-integral type, or isn't an SSA_NAME, then
+ return. */
+ if (TREE_CODE (name) != SSA_NAME
+ || !INTEGRAL_TYPE_P (TREE_TYPE (name)))
+ return NULL_TREE;
+ def = SSA_NAME_DEF_STMT (name);
+ if (!is_gimple_assign (def))
+ return NULL_TREE;
+
+ code = gimple_assign_rhs_code (def);
+ op1 = gimple_assign_rhs1 (def);
+ op2 = NULL_TREE;
+
+ /* Get the second operand for an EQ_EXPR, NE_EXPR, or BIT_XOR_EXPR
+ operation. If CODE is none of those or BIT_NOT_EXPR, NULL_TREE
+ is returned. */
+ if (code == EQ_EXPR || code == NE_EXPR
+ || code == BIT_XOR_EXPR)
+ op2 = gimple_assign_rhs2 (def);
+
+ switch (code)
+ {
+ case BIT_NOT_EXPR:
+ if (truth_valued_ssa_name (name))
+ return op1;
+ break;
+ case EQ_EXPR:
+ /* Check if we have X == 0 and X has an integral type. */
+ if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
+ break;
+ if (integer_zerop (op2))
+ return op1;
+ break;
+ case NE_EXPR:
+ /* Check if we have X != 1 and X is truth-valued. */
+ if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
+ break;
+ if (integer_onep (op2) && truth_valued_ssa_name (op1))
+ return op1;
+ break;
+ case BIT_XOR_EXPR:
+ /* Check if we have X ^ 1 and X is truth-valued. */
+ if (integer_onep (op2) && truth_valued_ssa_name (op1))
+ return op1;
+ break;
+ default:
+ break;
+ }
+
+ return NULL_TREE;
+}
+
+/* Optimize ARG1 CODE ARG2 to a constant for the bitwise binary
+ operation CODE, if one operand is the logically inverted
+ value of the other.
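+ For instance, given b = !a in any of the forms recognized by
+ lookup_logical_inverted_value, a & b folds to 0, and a | b or
+ a ^ b folds to 1 when a is truth-valued. */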
+static tree
+simplify_bitwise_binary_1 (enum tree_code code, tree type,
+ tree arg1, tree arg2)
+{
+ tree anot;
+
+ /* If CODE isn't a bitwise binary operation, return NULL_TREE. */
+ if (code != BIT_AND_EXPR && code != BIT_IOR_EXPR
+ && code != BIT_XOR_EXPR)
+ return NULL_TREE;
+
+ /* First check if operands ARG1 and ARG2 are equal. If so,
+ return NULL_TREE as this optimization is handled by fold_stmt. */
+ if (arg1 == arg2)
+ return NULL_TREE;
+ /* See if we have in arguments logical-not patterns. */
+ if (((anot = lookup_logical_inverted_value (arg1)) == NULL_TREE
+ || anot != arg2)
+ && ((anot = lookup_logical_inverted_value (arg2)) == NULL_TREE
+ || anot != arg1))
+ return NULL_TREE;
+
+ /* X & !X -> 0. */
+ if (code == BIT_AND_EXPR)
+ return fold_convert (type, integer_zero_node);
+ /* X | !X -> 1 and X ^ !X -> 1, if X is truth-valued. */
+ if (truth_valued_ssa_name (anot))
+ return fold_convert (type, integer_one_node);
+
+ /* ??? Otherwise the result is (X != 0 ? X : 1). Not handled. */
+ return NULL_TREE;
+}
+
+/* Simplify bitwise binary operations.
+ Return true if a transformation was applied, otherwise return false. */
+
+static bool
+simplify_bitwise_binary (gimple_stmt_iterator *gsi)
+{
+ gimple stmt = gsi_stmt (*gsi);
+ tree arg1 = gimple_assign_rhs1 (stmt);
+ tree arg2 = gimple_assign_rhs2 (stmt);
+ enum tree_code code = gimple_assign_rhs_code (stmt);
+ tree res;
+ gimple def1 = NULL, def2 = NULL;
+ tree def1_arg1, def2_arg1;
+ enum tree_code def1_code, def2_code;
+
+ def1_code = TREE_CODE (arg1);
+ def1_arg1 = arg1;
+ if (TREE_CODE (arg1) == SSA_NAME)
+ {
+ def1 = SSA_NAME_DEF_STMT (arg1);
+ if (is_gimple_assign (def1))
+ {
+ def1_code = gimple_assign_rhs_code (def1);
+ def1_arg1 = gimple_assign_rhs1 (def1);
+ }
+ }
+
+ def2_code = TREE_CODE (arg2);
+ def2_arg1 = arg2;
+ if (TREE_CODE (arg2) == SSA_NAME)
+ {
+ def2 = SSA_NAME_DEF_STMT (arg2);
+ if (is_gimple_assign (def2))
+ {
+ def2_code = gimple_assign_rhs_code (def2);
+ def2_arg1 = gimple_assign_rhs1 (def2);
+ }
+ }
+
+ /* Try to fold (type) X op CST -> (type) (X op ((type-of-X) CST)).
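+ For instance, with unsigned char c, tmp = (int) c; x = tmp & 3;
+ becomes tmp2 = c & 3; x = (int) tmp2; performing the operation
+ in the narrower type. */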
+ if (TREE_CODE (arg2) == INTEGER_CST
+ && CONVERT_EXPR_CODE_P (def1_code)
+ && INTEGRAL_TYPE_P (TREE_TYPE (def1_arg1))
+ && int_fits_type_p (arg2, TREE_TYPE (def1_arg1)))
+ {
+ gimple newop;
+ tree tem = create_tmp_reg (TREE_TYPE (def1_arg1), NULL);
+ newop =
+ gimple_build_assign_with_ops (code, tem, def1_arg1,
+ fold_convert_loc (gimple_location (stmt),
+ TREE_TYPE (def1_arg1),
+ arg2));
+ tem = make_ssa_name (tem, newop);
+ gimple_assign_set_lhs (newop, tem);
+ gimple_set_location (newop, gimple_location (stmt));
+ gsi_insert_before (gsi, newop, GSI_SAME_STMT);
+ gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
+ tem, NULL_TREE, NULL_TREE);
+ update_stmt (gsi_stmt (*gsi));
+ return true;
+ }
+
+ /* For bitwise binary operations, apply the operand conversions to
+ the result of the binary operation instead of to its operands.
+ This makes it possible to combine successive conversions and
+ bitwise binary operations.
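+ E.g. x = ((int) a) & ((int) b) with a and b of compatible narrow
+ types becomes tmp = a & b; x = (int) tmp. */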
+ if (CONVERT_EXPR_CODE_P (def1_code)
+ && CONVERT_EXPR_CODE_P (def2_code)
+ && types_compatible_p (TREE_TYPE (def1_arg1), TREE_TYPE (def2_arg1))
+ /* Make sure that the conversion widens the operands or keeps the
+ same precision, or that it changes the operation to a bit-field
+ precision. */
+ && ((TYPE_PRECISION (TREE_TYPE (def1_arg1))
+ <= TYPE_PRECISION (TREE_TYPE (arg1)))
+ || (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (arg1)))
+ != MODE_INT)
+ || (TYPE_PRECISION (TREE_TYPE (arg1))
+ != GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg1))))))
+ {
+ gimple newop;
+ tree tem = create_tmp_reg (TREE_TYPE (def1_arg1),
+ NULL);
+ newop = gimple_build_assign_with_ops (code, tem, def1_arg1, def2_arg1);
+ tem = make_ssa_name (tem, newop);
+ gimple_assign_set_lhs (newop, tem);
+ gimple_set_location (newop, gimple_location (stmt));
+ gsi_insert_before (gsi, newop, GSI_SAME_STMT);
+ gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
+ tem, NULL_TREE, NULL_TREE);
+ update_stmt (gsi_stmt (*gsi));
+ return true;
+ }
+
+ /* (a | CST1) & CST2 -> (a & CST2) | (CST1 & CST2). */
+ if (code == BIT_AND_EXPR
+ && def1_code == BIT_IOR_EXPR
+ && TREE_CODE (arg2) == INTEGER_CST
+ && TREE_CODE (gimple_assign_rhs2 (def1)) == INTEGER_CST)
+ {
+ tree cst = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg2),
+ arg2, gimple_assign_rhs2 (def1));
+ tree tem;
+ gimple newop;
+ if (integer_zerop (cst))
+ {
+ gimple_assign_set_rhs1 (stmt, def1_arg1);
+ update_stmt (stmt);
+ return true;
+ }
+ tem = create_tmp_reg (TREE_TYPE (arg2), NULL);
+ newop = gimple_build_assign_with_ops (BIT_AND_EXPR,
+ tem, def1_arg1, arg2);
+ tem = make_ssa_name (tem, newop);
+ gimple_assign_set_lhs (newop, tem);
+ gimple_set_location (newop, gimple_location (stmt));
+ /* Make sure to re-process the new stmt as the pass is walking upwards. */
+ gsi_insert_before (gsi, newop, GSI_NEW_STMT);
+ gimple_assign_set_rhs1 (stmt, tem);
+ gimple_assign_set_rhs2 (stmt, cst);
+ gimple_assign_set_rhs_code (stmt, BIT_IOR_EXPR);
+ update_stmt (stmt);
+ return true;
+ }
+
+ /* Combine successive equal operations with constants. */
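+ /* E.g. (a & 0xff) & 0x0f becomes a & 0x0f, and (a ^ 1) ^ 2
+ becomes a ^ 3. */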
+ if ((code == BIT_AND_EXPR
+ || code == BIT_IOR_EXPR
+ || code == BIT_XOR_EXPR)
+ && def1_code == code
+ && TREE_CODE (arg2) == INTEGER_CST
+ && TREE_CODE (gimple_assign_rhs2 (def1)) == INTEGER_CST)
+ {
+ tree cst = fold_build2 (code, TREE_TYPE (arg2),
+ arg2, gimple_assign_rhs2 (def1));
+ gimple_assign_set_rhs1 (stmt, def1_arg1);
+ gimple_assign_set_rhs2 (stmt, cst);
+ update_stmt (stmt);
+ return true;
+ }
+
+ /* Canonicalize X ^ ~0 to ~X. */
+ if (code == BIT_XOR_EXPR
+ && TREE_CODE (arg2) == INTEGER_CST
+ && integer_all_onesp (arg2))
+ {
+ gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, arg1, NULL_TREE);
+ gcc_assert (gsi_stmt (*gsi) == stmt);
+ update_stmt (stmt);
+ return true;
+ }
+
+ /* Try simple folding for X op !X and X op X. */
+ res = simplify_bitwise_binary_1 (code, TREE_TYPE (arg1), arg1, arg2);
+ if (res != NULL_TREE)
+ {
+ gimple_assign_set_rhs_from_tree (gsi, res);
+ update_stmt (gsi_stmt (*gsi));
+ return true;
+ }
+
+ return false;
+}
+
+
+/* Perform re-associations of the plus or minus statement STMT that are
+ always permitted. Returns true if the CFG was changed. */
+
+static bool
+associate_plusminus (gimple_stmt_iterator *gsi)
+{
+ gimple stmt = gsi_stmt (*gsi);
+ tree rhs1 = gimple_assign_rhs1 (stmt);
+ tree rhs2 = gimple_assign_rhs2 (stmt);
+ enum tree_code code = gimple_assign_rhs_code (stmt);
+ bool changed;
+
+ /* We can't reassociate at all for saturating types. */
+ if (TYPE_SATURATING (TREE_TYPE (rhs1)))
+ return false;
+
+ /* First, contract negates. */
+ do
+ {
+ changed = false;
+
+ /* A +- (-B) -> A -+ B. */
+ if (TREE_CODE (rhs2) == SSA_NAME)
+ {
+ gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
+ if (is_gimple_assign (def_stmt)
+ && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
+ && can_propagate_from (def_stmt))
+ {
+ code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
+ gimple_assign_set_rhs_code (stmt, code);
+ rhs2 = gimple_assign_rhs1 (def_stmt);
+ gimple_assign_set_rhs2 (stmt, rhs2);
+ gimple_set_modified (stmt, true);
+ changed = true;
+ }
+ }
+
+ /* (-A) + B -> B - A. */
+ if (TREE_CODE (rhs1) == SSA_NAME
+ && code == PLUS_EXPR)
+ {
+ gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
+ if (is_gimple_assign (def_stmt)
+ && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
+ && can_propagate_from (def_stmt))
+ {
+ code = MINUS_EXPR;
+ gimple_assign_set_rhs_code (stmt, code);
+ rhs1 = rhs2;
+ gimple_assign_set_rhs1 (stmt, rhs1);
+ rhs2 = gimple_assign_rhs1 (def_stmt);
+ gimple_assign_set_rhs2 (stmt, rhs2);
+ gimple_set_modified (stmt, true);
+ changed = true;
+ }
+ }
+ }
+ while (changed);
+
+ /* We can't reassociate floating-point or fixed-point plus or minus
+ because of saturation to +-Inf. */
+ if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
+ || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
+ goto out;
+
+ /* Second, match patterns that allow contracting a plus-minus pair
+ irrespective of overflow issues.
+
+ (A +- B) - A       ->  +- B
+ (A +- B) -+ B      ->  A
+ (CST +- A) +- CST  ->  CST +- A
+ (A + CST) +- CST   ->  A + CST
+ ~A + A             ->  -1
+ ~A + 1             ->  -A
+ A - (A +- B)       ->  -+ B
+ A +- (B +- A)      ->  +- B
+ CST +- (CST +- A)  ->  CST +- A
+ CST +- (A +- CST)  ->  CST +- A
+ A + ~A             ->  -1
+
+ via commuting the addition and contracting operations to zero
+ by reassociation. */
+
+ if (TREE_CODE (rhs1) == SSA_NAME)
+ {
+ gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
+ if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
+ {
+ enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
+ if (def_code == PLUS_EXPR
+ || def_code == MINUS_EXPR)
+ {
+ tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
+ tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
+ if (operand_equal_p (def_rhs1, rhs2, 0)
+ && code == MINUS_EXPR)
+ {
+ /* (A +- B) - A -> +- B. */
+ code = ((def_code == PLUS_EXPR)
+ ? TREE_CODE (def_rhs2) : NEGATE_EXPR);
+ rhs1 = def_rhs2;
+ rhs2 = NULL_TREE;
+ gimple_assign_set_rhs_with_ops (gsi, code, rhs1, NULL_TREE);
+ gcc_assert (gsi_stmt (*gsi) == stmt);
+ gimple_set_modified (stmt, true);
+ }
+ else if (operand_equal_p (def_rhs2, rhs2, 0)
+ && code != def_code)