{
struct ptr_info_def *pi;
- gcc_assert ((TREE_CODE (ptr) == SSA_NAME
- || TREE_CODE (ptr) == ADDR_EXPR
- || TREE_CODE (ptr) == INTEGER_CST)
- && (TREE_CODE (decl) == VAR_DECL
- || TREE_CODE (decl) == PARM_DECL
- || TREE_CODE (decl) == RESULT_DECL));
-
- /* Non-aliased variables can not be pointed to. */
- if (!may_be_aliased (decl))
+ /* Conversions are irrelevant for points-to information and
+ data-dependence analysis can feed us those. */
+ STRIP_NOPS (ptr);
+
+ /* Anything we do not explicitly handle aliases.  */
+ if ((TREE_CODE (ptr) != SSA_NAME
+ && TREE_CODE (ptr) != ADDR_EXPR
+ && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
+ || !POINTER_TYPE_P (TREE_TYPE (ptr))
+ || (TREE_CODE (decl) != VAR_DECL
+ && TREE_CODE (decl) != PARM_DECL
+ && TREE_CODE (decl) != RESULT_DECL))
return false;
+ /* Disregard pointer offsetting. */
+ if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
+ {
+ do
+ {
+ ptr = TREE_OPERAND (ptr, 0);
+ }
+ while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
+ return ptr_deref_may_alias_decl_p (ptr, decl);
+ }
+
/* ADDR_EXPR pointers either just offset another pointer or directly
specify the pointed-to set. */
if (TREE_CODE (ptr) == ADDR_EXPR)
return true;
}
- /* We can end up with dereferencing constant pointers.
- Just bail out in this case. */
- if (TREE_CODE (ptr) == INTEGER_CST)
- return true;
+ /* Non-aliased variables cannot be pointed to.  */
+ if (!may_be_aliased (decl))
+ return false;
/* If we do not have useful points-to information for this pointer
we cannot disambiguate anything else. */
The caller is responsible for applying TBAA to see if accesses
through PTR1 and PTR2 may conflict at all. */
-static bool
+bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
struct ptr_info_def *pi1, *pi2;
- gcc_assert ((TREE_CODE (ptr1) == SSA_NAME
- || TREE_CODE (ptr1) == ADDR_EXPR
- || TREE_CODE (ptr1) == INTEGER_CST)
- && (TREE_CODE (ptr2) == SSA_NAME
- || TREE_CODE (ptr2) == ADDR_EXPR
- || TREE_CODE (ptr2) == INTEGER_CST));
+ /* Conversions are irrelevant for points-to information and
+ data-dependence analysis can feed us those. */
+ STRIP_NOPS (ptr1);
+ STRIP_NOPS (ptr2);
+
+ /* Anything we do not explicitly handle aliases.  */
+ if ((TREE_CODE (ptr1) != SSA_NAME
+ && TREE_CODE (ptr1) != ADDR_EXPR
+ && TREE_CODE (ptr1) != POINTER_PLUS_EXPR)
+ || (TREE_CODE (ptr2) != SSA_NAME
+ && TREE_CODE (ptr2) != ADDR_EXPR
+ && TREE_CODE (ptr2) != POINTER_PLUS_EXPR)
+ || !POINTER_TYPE_P (TREE_TYPE (ptr1))
+ || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
+ return true;
+
+ /* Disregard pointer offsetting. */
+ if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
+ {
+ do
+ {
+ ptr1 = TREE_OPERAND (ptr1, 0);
+ }
+ while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
+ return ptr_derefs_may_alias_p (ptr1, ptr2);
+ }
+ if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
+ {
+ do
+ {
+ ptr2 = TREE_OPERAND (ptr2, 0);
+ }
+ while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
+ return ptr_derefs_may_alias_p (ptr1, ptr2);
+ }
/* ADDR_EXPR pointers either just offset another pointer or directly
specify the pointed-to set. */
return true;
}
- /* We can end up with dereferencing constant pointers.
- Just bail out in this case. */
- if (TREE_CODE (ptr1) == INTEGER_CST
- || TREE_CODE (ptr2) == INTEGER_CST)
- return true;
-
/* We may end up with two empty points-to solutions for two same pointers.
In this case we still want to say both pointers alias, so shortcut
that here. */
tree ptrtype1;
HOST_WIDE_INT offset1p = offset1, offset2p = offset2;
- if (TREE_CODE (base1) == TARGET_MEM_REF)
- {
- if (!TMR_BASE (base1)
- || !POINTER_TYPE_P (TMR_BASE (base1)))
- return true;
- ptr1 = TMR_BASE (base1);
- }
- else
- ptr1 = TREE_OPERAND (base1, 0);
+ ptr1 = TREE_OPERAND (base1, 0);
/* The offset embedded in MEM_REFs can be negative. Bias them
so that the resulting offset adjustment is positive. */
(the pointer base cannot validly point to an offset less than zero
of the variable).
They also cannot alias if the pointer may not point to the decl. */
- if ((TREE_CODE (base1) != TARGET_MEM_REF || !TMR_INDEX (base1))
+ if ((TREE_CODE (base1) != TARGET_MEM_REF
+ || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
&& !ranges_overlap_p (MAX (0, offset1p), -1, offset2p, max_size2))
return false;
if (!ptr_deref_may_alias_decl_p (ptr1, base2))
is relative to the start of the type which we ensure by
comparing rvalue and access type and disregarding the constant
pointer offset. */
- if ((TREE_CODE (base1) != TARGET_MEM_REF || !TMR_INDEX (base1))
+ if ((TREE_CODE (base1) != TARGET_MEM_REF
+ || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
&& (TREE_CODE (base1) != MEM_REF
|| same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1)
&& same_type_for_tbaa (TREE_TYPE (ptrtype1), TREE_TYPE (base2)) == 1)
tree ptr2;
tree ptrtype1, ptrtype2;
- if (TREE_CODE (base1) == TARGET_MEM_REF)
- {
- if (!TMR_BASE (base1)
- || !POINTER_TYPE_P (TMR_BASE (base1)))
- return true;
- ptr1 = TMR_BASE (base1);
- }
- else
- ptr1 = TREE_OPERAND (base1, 0);
-
- if (TREE_CODE (base2) == TARGET_MEM_REF)
- {
- if (!TMR_BASE (base2)
- || !POINTER_TYPE_P (TMR_BASE (base2)))
- return true;
- ptr2 = TMR_BASE (base2);
- }
- else
- ptr2 = TREE_OPERAND (base2, 0);
+ ptr1 = TREE_OPERAND (base1, 0);
+ ptr2 = TREE_OPERAND (base2, 0);
/* If both bases are based on pointers they cannot alias if they may not
point to the same memory object or if they point to the same object
if ((!cfun || gimple_in_ssa_p (cfun))
&& operand_equal_p (ptr1, ptr2, 0)
&& (((TREE_CODE (base1) != TARGET_MEM_REF
- || !TMR_INDEX (base1))
+ || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
&& (TREE_CODE (base2) != TARGET_MEM_REF
- || !TMR_INDEX (base2)))
+ || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
|| (TREE_CODE (base1) == TARGET_MEM_REF
&& TREE_CODE (base2) == TARGET_MEM_REF
&& (TMR_STEP (base1) == TMR_STEP (base2)
|| (TMR_STEP (base1) && TMR_STEP (base2)
&& operand_equal_p (TMR_STEP (base1),
TMR_STEP (base2), 0)))
- && operand_equal_p (TMR_INDEX (base1), TMR_INDEX (base2), 0))))
+ && (TMR_INDEX (base1) == TMR_INDEX (base2)
+ || (TMR_INDEX (base1) && TMR_INDEX (base2)
+ && operand_equal_p (TMR_INDEX (base1),
+ TMR_INDEX (base2), 0)))
+ && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
+ || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
+ && operand_equal_p (TMR_INDEX2 (base1),
+ TMR_INDEX2 (base2), 0))))))
{
/* The offset embedded in MEM_REFs can be negative. Bias them
so that the resulting offset adjustment is positive. */
gcc_checking_assert ((!ref1->ref
|| TREE_CODE (ref1->ref) == SSA_NAME
|| DECL_P (ref1->ref)
+ || TREE_CODE (ref1->ref) == STRING_CST
|| handled_component_p (ref1->ref)
|| INDIRECT_REF_P (ref1->ref)
|| TREE_CODE (ref1->ref) == MEM_REF
&& (!ref2->ref
|| TREE_CODE (ref2->ref) == SSA_NAME
|| DECL_P (ref2->ref)
+ || TREE_CODE (ref2->ref) == STRING_CST
|| handled_component_p (ref2->ref)
|| INDIRECT_REF_P (ref2->ref)
|| TREE_CODE (ref2->ref) == MEM_REF
*D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
which is seen as a struct copy. */
if (TREE_CODE (base1) == SSA_NAME
- || TREE_CODE (base2) == SSA_NAME
|| TREE_CODE (base1) == CONST_DECL
+ || TREE_CODE (base1) == CONSTRUCTOR
+ || TREE_CODE (base1) == ADDR_EXPR
+ || CONSTANT_CLASS_P (base1)
+ || TREE_CODE (base2) == SSA_NAME
|| TREE_CODE (base2) == CONST_DECL
- || is_gimple_min_invariant (base1)
- || is_gimple_min_invariant (base2))
+ || TREE_CODE (base2) == CONSTRUCTOR
+ || TREE_CODE (base2) == ADDR_EXPR
+ || CONSTANT_CLASS_P (base2))
return false;
/* We can end up refering to code via function and label decls.
As we likely do not properly track code aliases conservatively
bail out. */
if (TREE_CODE (base1) == FUNCTION_DECL
- || TREE_CODE (base2) == FUNCTION_DECL
|| TREE_CODE (base1) == LABEL_DECL
+ || TREE_CODE (base2) == FUNCTION_DECL
|| TREE_CODE (base2) == LABEL_DECL)
return true;
case BUILT_IN_SINCOSF:
case BUILT_IN_SINCOSL:
return false;
+ /* __sync_* builtins and some OpenMP builtins act as threading
+ barriers. */
+#undef DEF_SYNC_BUILTIN
+#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
+#include "sync-builtins.def"
+#undef DEF_SYNC_BUILTIN
+ case BUILT_IN_GOMP_ATOMIC_START:
+ case BUILT_IN_GOMP_ATOMIC_END:
+ case BUILT_IN_GOMP_BARRIER:
+ case BUILT_IN_GOMP_TASKWAIT:
+ case BUILT_IN_GOMP_CRITICAL_START:
+ case BUILT_IN_GOMP_CRITICAL_END:
+ case BUILT_IN_GOMP_CRITICAL_NAME_START:
+ case BUILT_IN_GOMP_CRITICAL_NAME_END:
+ case BUILT_IN_GOMP_LOOP_END:
+ case BUILT_IN_GOMP_ORDERED_START:
+ case BUILT_IN_GOMP_ORDERED_END:
+ case BUILT_IN_GOMP_PARALLEL_END:
+ case BUILT_IN_GOMP_SECTIONS_END:
+ case BUILT_IN_GOMP_SINGLE_COPY_START:
+ case BUILT_IN_GOMP_SINGLE_COPY_END:
+ return true;
default:
/* Fallthru to general call handling. */;
return (ptr_deref_may_alias_ref_p_1 (sin, ref)
|| ptr_deref_may_alias_ref_p_1 (cos, ref));
}
+ /* __sync_* builtins and some OpenMP builtins act as threading
+ barriers. */
+#undef DEF_SYNC_BUILTIN
+#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
+#include "sync-builtins.def"
+#undef DEF_SYNC_BUILTIN
+ case BUILT_IN_GOMP_ATOMIC_START:
+ case BUILT_IN_GOMP_ATOMIC_END:
+ case BUILT_IN_GOMP_BARRIER:
+ case BUILT_IN_GOMP_TASKWAIT:
+ case BUILT_IN_GOMP_CRITICAL_START:
+ case BUILT_IN_GOMP_CRITICAL_END:
+ case BUILT_IN_GOMP_CRITICAL_NAME_START:
+ case BUILT_IN_GOMP_CRITICAL_NAME_END:
+ case BUILT_IN_GOMP_LOOP_END:
+ case BUILT_IN_GOMP_ORDERED_START:
+ case BUILT_IN_GOMP_ORDERED_END:
+ case BUILT_IN_GOMP_PARALLEL_END:
+ case BUILT_IN_GOMP_SECTIONS_END:
+ case BUILT_IN_GOMP_SINGLE_COPY_START:
+ case BUILT_IN_GOMP_SINGLE_COPY_END:
+ return true;
default:
/* Fallthru to general call handling. */;
}
{
tree lhs = gimple_call_lhs (stmt);
if (lhs
- && !is_gimple_reg (lhs))
+ && TREE_CODE (lhs) != SSA_NAME)
{
ao_ref r;
ao_ref_init (&r, lhs);
else if (gimple_assign_single_p (stmt))
{
tree lhs = gimple_assign_lhs (stmt);
- if (!is_gimple_reg (lhs))
+ if (TREE_CODE (lhs) != SSA_NAME)
{
ao_ref r;
- ao_ref_init (&r, gimple_assign_lhs (stmt));
+ ao_ref_init (&r, lhs);
return refs_may_alias_p_1 (ref, &r, true);
}
}
return stmt_may_clobber_ref_p_1 (stmt, &r);
}
+/* If STMT kills the memory reference REF return true, otherwise
+ return false. */
+
+static bool
+stmt_kills_ref_p_1 (gimple stmt, ao_ref *ref)
+{
+ if (gimple_has_lhs (stmt)
+ && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME)
+ {
+ tree base, lhs = gimple_get_lhs (stmt);
+ HOST_WIDE_INT size, offset, max_size;
+ ao_ref_base (ref);
+ base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
+ /* We can get MEM[symbol: sZ, index: D.8862_1] here,
+ so base == ref->base does not always hold. */
+ if (base == ref->base)
+ {
+ /* For a must-alias check we need to be able to constrain
+ the accesses properly. */
+ if (size != -1 && size == max_size
+ && ref->max_size != -1)
+ {
+ if (offset <= ref->offset
+ && offset + size >= ref->offset + ref->max_size)
+ return true;
+ }
+ }
+ }
+ return false;
+}
+
+bool
+stmt_kills_ref_p (gimple stmt, tree ref)
+{
+ ao_ref r;
+ ao_ref_init (&r, ref);
+ return stmt_kills_ref_p_1 (stmt, &r);
+}
+
/* Walk the virtual use-def chain of VUSE until hitting the virtual operand
TARGET or a statement clobbering the memory reference REF in which