+  /* If both references are through the same pointer and, in the
+     TARGET_MEM_REF case, use matching step and index operands, the
+     accesses can only differ in their constant offsets, so it is
+     enough to check whether the accessed ranges overlap.  */
+  if ((!cfun || gimple_in_ssa_p (cfun))
+      && operand_equal_p (ptr1, ptr2, 0)
+      && (((TREE_CODE (base1) != TARGET_MEM_REF
+            || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
+           && (TREE_CODE (base2) != TARGET_MEM_REF
+               || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
+          || (TREE_CODE (base1) == TARGET_MEM_REF
+              && TREE_CODE (base2) == TARGET_MEM_REF
+              && (TMR_STEP (base1) == TMR_STEP (base2)
+                  || (TMR_STEP (base1) && TMR_STEP (base2)
+                      && operand_equal_p (TMR_STEP (base1),
+                                          TMR_STEP (base2), 0)))
+              && (TMR_INDEX (base1) == TMR_INDEX (base2)
+                  || (TMR_INDEX (base1) && TMR_INDEX (base2)
+                      && operand_equal_p (TMR_INDEX (base1),
+                                          TMR_INDEX (base2), 0)))
+              && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
+                  || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
+                      && operand_equal_p (TMR_INDEX2 (base1),
+                                          TMR_INDEX2 (base2), 0))))))
+    {
+      /* The offset embedded in MEM_REFs can be negative.  Bias them
+         so that the resulting offset adjustment is positive.  */
+      if (TREE_CODE (base1) == MEM_REF
+          || TREE_CODE (base1) == TARGET_MEM_REF)
+        {
+          double_int moff = mem_ref_offset (base1);
+          /* mem_ref_offset is in bytes; shift it left by
+             log2 (BITS_PER_UNIT) to convert it to bits, the unit
+             of offset1 and offset2.  */
+          moff = double_int_lshift (moff,
+                                    BITS_PER_UNIT == 8
+                                    ? 3 : exact_log2 (BITS_PER_UNIT),
+                                    HOST_BITS_PER_DOUBLE_INT, true);
+          if (double_int_negative_p (moff))
+            offset2 += double_int_neg (moff).low;
+          else
+            offset1 += moff.low;
+        }
+      /* Likewise for the offset embedded in base2.  */
+      if (TREE_CODE (base2) == MEM_REF
+          || TREE_CODE (base2) == TARGET_MEM_REF)
+        {
+          double_int moff = mem_ref_offset (base2);
+          moff = double_int_lshift (moff,
+                                    BITS_PER_UNIT == 8
+                                    ? 3 : exact_log2 (BITS_PER_UNIT),
+                                    HOST_BITS_PER_DOUBLE_INT, true);
+          if (double_int_negative_p (moff))
+            offset1 += double_int_neg (moff).low;
+          else
+            offset2 += moff.low;
+        }
+      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
+    }
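
To illustrate the biasing trick concretely, here is a minimal standalone
sketch of the same logic in plain C.  It is not GCC code: ranges_overlap
is a simplified stand-in for ranges_overlap_p (it omits the unknown-size
case that ranges_overlap_p encodes as size == -1), the moff1/moff2 and
offset/size names are made up for the example, and 8-bit bytes are
assumed.

  #include <stdio.h>

  /* Simplified stand-in for GCC's ranges_overlap_p: do the half-open
     ranges [pos1, pos1 + size1) and [pos2, pos2 + size2) overlap?  */
  static int
  ranges_overlap (unsigned long pos1, unsigned long size1,
                  unsigned long pos2, unsigned long size2)
  {
    return pos1 < pos2 + size2 && pos2 < pos1 + size1;
  }

  int
  main (void)
  {
    /* Two 8-byte accesses off the same pointer, at byte offsets -8
       and 4, expressed in bits (assuming 8 bits per byte).  */
    long moff1 = -8 * 8, moff2 = 4 * 8;
    unsigned long offset1 = 0, size1 = 64;
    unsigned long offset2 = 0, size2 = 64;

    /* The bias from the hunk: a negative embedded offset is negated
       and added to the *other* side, so both offsets stay >= 0.  */
    if (moff1 < 0)
      offset2 += -moff1;
    else
      offset1 += moff1;
    if (moff2 < 0)
      offset1 += -moff2;
    else
      offset2 += moff2;

    /* Prints "overlap: 0": [-64, 0) and [32, 96) do not overlap.  */
    printf ("overlap: %d\n",
            ranges_overlap (offset1, size1, offset2, size2));
    return 0;
  }

Only the relative distance between the two offsets matters for the
overlap test, so adding the negated offset to the opposite side is
equivalent to subtracting it from its own side while keeping both
unsigned offsets non-negative; that is why the hunk adds
double_int_neg (moff).low to the other reference's offset.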