--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
+2011-08-24 Richard Guenther <rguenther@suse.de>
+
+ PR tree-optimization/50067
+ * tree-data-ref.c (dr_analyze_indices): Do not add an access
+ function for a MEM_REF base that has no evolution in the loop
+ nest or that is not analyzable.
+
2011-08-23 Vladimir Makarov <vmakarov@redhat.com>

* ira.c (ira_init_register_move_cost): Check small subclasses
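
(Illustration, not part of the patch: a minimal sketch of the situation the
tree-data-ref.c change above addresses; the names f, a and p are made up.

  void
  f (int *a, int *p, int n)
  {
    int i;
    for (i = 0; i < n; ++i)
      {
        a[i] = i;   /* address evolves in the loop nest */
        *p &= ~1;   /* MEM_REF with a loop-invariant address */
      }
  }

Here the store through p dereferences an address with no evolution in the
loop nest; after this change dr_analyze_indices no longer adds an access
function for such a base, but only for bases whose evolution is an
analyzable POLYNOMIAL_CHREC.)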
--- a/gcc/testsuite/ChangeLog
+++ b/gcc/testsuite/ChangeLog
+2011-08-24 Richard Guenther <rguenther@suse.de>
+
+ PR tree-optimization/50067
+ * gcc.dg/torture/pr50067-3.c: New testcase.
+ * gcc.dg/torture/pr50067-4.c: Likewise.
+ * gcc.dg/torture/pr50067-5.c: Likewise.
+
2011-08-23 Kirill Yukhin <kirill.yukhin@intel.com>

* g++.dg/other/i386-2.C: Add -mavx2 check.
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/torture/pr50067-3.c
+/* { dg-do run } */
+/* { dg-options "-fpredictive-commoning" } */
+
+extern void abort (void);
+int a[6] = { 0, 0, 0, 0, 7, 0 };
+static int *p = &a[4];
+
+int
+main ()
+{
+ int i;
+ for (i = 0; i < 4; ++i)
+ {
+ a[i + 1] = a[i + 2] > i;
+ *p &= ~1;
+ }
+ if (a[4] != 0)
+ abort ();
+ return 0;
+}
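
(Note, not part of the testcase above: p points to &a[4], so the *p store
overlaps the a[i + 1] stores. The test verifies that predictive commoning
does not disambiguate the two references through a bogus access function
and reorder the overlapping stores, which would make the a[4] check abort.)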
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/torture/pr50067-4.c
+/* { dg-do run } */
+
+/* Verify we do not get a bogus access function with the MEM_REF offset
+   (0B vs. 1B) in it, which would disambiguate the two accesses and
+   allow vectorization.  */
+
+extern int memcmp(const void *, const void *, __SIZE_TYPE__);
+extern void abort (void);
+short a[33] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31 };
+short b[33] = { 0, };
+int main()
+{
+#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
+ int i;
+ if (sizeof (short) == 2)
+ {
+ for (i = 0; i < 64; ++i)
+ {
+ (*((char(*)[])&a[1]))[i] = (*((char(*)[])&a[0]))[i+1];
+ }
+ if (memcmp (&a, &b, sizeof (a)) != 0)
+ abort ();
+ }
+#endif
+ return 0;
+}
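
(Note, not part of the testcase above: the char-array casts turn the loop
into a byte-wise rolling copy in which each store feeds the load of the
next iteration. If the 0B and 1B MEM_REF offsets leak into the access
functions, dependence analysis treats the two references as independent,
the loop is vectorized into a block copy, and the memcmp check aborts.)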
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/torture/pr50067-5.c
+/* { dg-do run } */
+
+/* Verify we do not get bogus access-function pairs with exchanged
+   dimensions, (0, {1, +, 1}_1) vs. ({2B, +, 1}_1, 0), which would
+   disambiguate the two accesses and allow vectorization.  */
+
+extern int memcmp(const void *, const void *, __SIZE_TYPE__);
+extern void abort (void);
+short a[33] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31 };
+short b[33] = { 0, };
+char * volatile ap_ = (char *)&a[0];
+int main()
+{
+#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
+ int i;
+ char *ap = ap_;
+ if (sizeof (short) == 2)
+ {
+ for (i = 0; i < 64; ++i)
+ {
+ (*((char(*)[])&ap[i+2]))[0] = (*((char(*)[])&ap[0]))[i+1];
+ }
+ if (memcmp (&a, &b, sizeof (a)) != 0)
+ abort ();
+ }
+#endif
+ return 0;
+}
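
(Note on the notation in the comment at the top of the testcase above:
{a, +, s}_1 is GCC's scalar-evolution chrec for a value that starts at a
and is incremented by s on each iteration of loop 1. The bogus pair
(0, {1, +, 1}_1) vs. ({2B, +, 1}_1, 0) describes the same footprint with
the evolving dimension exchanged, which lets dependence analysis wrongly
conclude the two accesses never overlap.)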
--- a/gcc/tree-data-ref.c
+++ b/gcc/tree-data-ref.c
aref = TREE_OPERAND (aref, 0);
}
+ /* If the address operand of a MEM_REF base has an evolution in the
+ analyzed nest, add it as an additional independent access-function. */
if (TREE_CODE (aref) == MEM_REF)
{
op = TREE_OPERAND (aref, 0);
access_fn = analyze_scalar_evolution (loop, op);
access_fn = instantiate_scev (before_loop, loop, access_fn);
- base = initial_condition (access_fn);
- split_constant_offset (base, &base, &off);
- if (!integer_zerop (TREE_OPERAND (aref, 1)))
+ if (TREE_CODE (access_fn) == POLYNOMIAL_CHREC)
{
- off = size_binop (PLUS_EXPR, off,
- fold_convert (ssizetype, TREE_OPERAND (aref, 1)));
- TREE_OPERAND (aref, 1)
- = build_int_cst (TREE_TYPE (TREE_OPERAND (aref, 1)), 0);
+ base = initial_condition (access_fn);
+ split_constant_offset (base, &base, &off);
+ /* Fold the MEM_REF offset into the evolution's initial
+ value to make more bases comparable.  */
+ if (!integer_zerop (TREE_OPERAND (aref, 1)))
+ {
+ off = size_binop (PLUS_EXPR, off,
+ fold_convert (ssizetype,
+ TREE_OPERAND (aref, 1)));
+ TREE_OPERAND (aref, 1)
+ = build_int_cst (TREE_TYPE (TREE_OPERAND (aref, 1)), 0);
+ }
+ access_fn = chrec_replace_initial_condition
+ (access_fn, fold_convert (TREE_TYPE (base), off));
+ TREE_OPERAND (aref, 0) = base;
+ VEC_safe_push (tree, heap, access_fns, access_fn);
}
- access_fn = chrec_replace_initial_condition (access_fn,
- fold_convert (TREE_TYPE (base), off));
-
- TREE_OPERAND (aref, 0) = base;
- VEC_safe_push (tree, heap, access_fns, access_fn);
}
if (TREE_CODE (ref) == MEM_REF