if (!pi)
return true;
+  /* If the decl can be used as a restrict tag and we have a restrict
+     pointer and that pointer's points-to set doesn't contain this decl
+     then they can't alias.  */
+ if (DECL_RESTRICTED_P (decl)
+ && TYPE_RESTRICT (TREE_TYPE (ptr))
+ && pi->pt.vars_contains_restrict)
+ return bitmap_bit_p (pi->pt.vars, DECL_UID (decl));
+
return pt_solution_includes (&pi->pt, decl);
}
if (!pi1 || !pi2)
return true;
+ /* If both pointers are restrict-qualified try to disambiguate
+ with restrict information. */
+ if (TYPE_RESTRICT (TREE_TYPE (ptr1))
+ && TYPE_RESTRICT (TREE_TYPE (ptr2))
+ && !pt_solutions_same_restrict_base (&pi1->pt, &pi2->pt))
+ return false;
+
/* ??? This does not use TBAA to prune decls from the intersection
that not both pointers may access. */
return pt_solutions_intersect (&pi1->pt, &pi2->pt);
return true;
}
-static bool
-ptr_deref_may_alias_ref_p (tree ptr, tree ref)
-{
- ao_ref r;
- ao_ref_init (&r, ref);
- return ptr_deref_may_alias_ref_p_1 (ptr, &r);
-}
-
/* Dump alias information on FILE. */
return ref->ref_alias_set;
}
+/* Init an alias-oracle reference representation *REF from a gimple
+   pointer PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE
+   the size is assumed to be unknown.  The access is assumed to be
+   only to or after the pointer target, not before it.  */
+
+void
+ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
+{
+  HOST_WIDE_INT t1, t2;
+  ref->ref = NULL_TREE;
+  if (TREE_CODE (ptr) == ADDR_EXPR)
+    /* For &obj style pointers recover the base object and the offset
+       of the access within it.  Size/max_size from the extent query
+       are discarded (t1, t2) in favor of SIZE below.  */
+    ref->base = get_ref_base_and_extent (TREE_OPERAND (ptr, 0),
+					 &ref->offset, &t1, &t2);
+  else
+    {
+      /* Otherwise represent the access as a dereference of PTR at
+	 offset zero from the pointer target.  */
+      ref->base = build1 (INDIRECT_REF, char_type_node, ptr);
+      ref->offset = 0;
+    }
+  if (size
+      && host_integerp (size, 0)
+      /* Guard the byte-to-bit conversion against overflow of the
+	 host-wide-int multiplication.  */
+      && TREE_INT_CST_LOW (size) * 8 / 8 == TREE_INT_CST_LOW (size))
+    ref->max_size = ref->size = TREE_INT_CST_LOW (size) * 8;
+  else
+    /* Unknown (or unrepresentably large) access size.  */
+    ref->max_size = ref->size = -1;
+  /* Alias sets are computed on demand; zero means "not yet known".  */
+  ref->ref_alias_set = 0;
+  ref->base_alias_set = 0;
+}
+
/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
purpose of TBAA. Return 0 if they are distinct and -1 if we cannot
decide. */
}
/* Return true if two memory references based on the variables BASE1
- and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1[ and
- [OFFSET2, OFFSET2 + MAX_SIZE2[ may alias. */
+ and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
+ [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. */
static bool
decl_refs_may_alias_p (tree base1,
}
/* Return true if an indirect reference based on *PTR1 constrained
- to [OFFSET1, OFFSET1 + MAX_SIZE1[ may alias a variable based on BASE2
- constrained to [OFFSET2, OFFSET2 + MAX_SIZE2[. *PTR1 and BASE2 have
+ to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
+ constrained to [OFFSET2, OFFSET2 + MAX_SIZE2). *PTR1 and BASE2 have
the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
in which case they are computed on-demand. REF1 and REF2
if non-NULL are the complete memory reference trees. */
}
/* Return true if two indirect references based on *PTR1
- and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1[ and
- [OFFSET2, OFFSET2 + MAX_SIZE2[ may alias. *PTR1 and *PTR2 have
+ and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
+ [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. *PTR1 and *PTR2 have
the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
in which case they are computed on-demand. REF1 and REF2
if non-NULL are the complete memory reference trees. */
/* Return true, if the two memory references REF1 and REF2 may alias. */
-static bool
+bool
refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
{
tree base1, base2;
|| is_gimple_min_invariant (base2))
return false;
+  /* We can end up referring to code via function decls.  As we likely
+     do not properly track code aliases conservatively bail out.  */
+ if (TREE_CODE (base1) == FUNCTION_DECL
+ || TREE_CODE (base2) == FUNCTION_DECL)
+ return true;
+
/* Defer to simple offset based disambiguation if we have
references based on two decls. Do this before defering to
TBAA to handle must-alias cases in conformance with the
otherwise return false. */
static bool
-ref_maybe_used_by_call_p_1 (gimple call, tree ref)
+ref_maybe_used_by_call_p_1 (gimple call, ao_ref *ref)
{
tree base, callee;
unsigned i;
&& (flags & (ECF_CONST|ECF_NOVOPS)))
goto process_args;
- /* If the reference is not based on a decl give up.
- ??? Handle indirect references by intersecting the call-used
- solution with that of the pointer. */
- base = get_base_address (ref);
- if (!base
- || !DECL_P (base))
+ base = ao_ref_base (ref);
+ if (!base)
return true;
/* If the reference is based on a decl that is not aliased the call
case BUILT_IN_STRCAT:
case BUILT_IN_STRNCAT:
{
- tree src = gimple_call_arg (call, 1);
- return ptr_deref_may_alias_ref_p (src, ref);
+ ao_ref dref;
+ tree size = NULL_TREE;
+ if (gimple_call_num_args (call) == 3)
+ size = gimple_call_arg (call, 2);
+ ao_ref_init_from_ptr_and_size (&dref,
+ gimple_call_arg (call, 1),
+ size);
+ return refs_may_alias_p_1 (&dref, ref, false);
}
+ /* The following builtins do not read from memory. */
+ case BUILT_IN_FREE:
+ case BUILT_IN_MEMSET:
+ case BUILT_IN_FREXP:
+ case BUILT_IN_FREXPF:
+ case BUILT_IN_FREXPL:
+ case BUILT_IN_GAMMA_R:
+ case BUILT_IN_GAMMAF_R:
+ case BUILT_IN_GAMMAL_R:
+ case BUILT_IN_LGAMMA_R:
+ case BUILT_IN_LGAMMAF_R:
+ case BUILT_IN_LGAMMAL_R:
+ case BUILT_IN_MODF:
+ case BUILT_IN_MODFF:
+ case BUILT_IN_MODFL:
+ case BUILT_IN_REMQUO:
+ case BUILT_IN_REMQUOF:
+ case BUILT_IN_REMQUOL:
+ case BUILT_IN_SINCOS:
+ case BUILT_IN_SINCOSF:
+ case BUILT_IN_SINCOSL:
+ return false;
+
default:
/* Fallthru to general call handling. */;
}
it may be used. */
if (flags & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
{
- if (is_call_used (base))
+ if (DECL_P (base))
+ {
+ if (is_call_used (base))
+ return true;
+ }
+ else if (INDIRECT_REF_P (base)
+ && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
+ {
+ struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
+ if (!pi)
+ return true;
+
+ if (pt_solution_includes_global (&pi->pt)
+ || pt_solutions_intersect (&cfun->gimple_df->callused, &pi->pt)
+ || pt_solutions_intersect (&cfun->gimple_df->escaped, &pi->pt))
+ return true;
+ }
+ else
return true;
}
else
{
- if (is_call_clobbered (base))
+ if (DECL_P (base))
+ {
+ if (is_call_clobbered (base))
+ return true;
+ }
+ else if (INDIRECT_REF_P (base)
+ && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
+ {
+ struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
+ if (!pi)
+ return true;
+
+ if (pt_solution_includes_global (&pi->pt)
+ || pt_solutions_intersect (&cfun->gimple_df->escaped, &pi->pt))
+ return true;
+ }
+ else
return true;
}
{
tree op = gimple_call_arg (call, i);
- if (TREE_CODE (op) == EXC_PTR_EXPR
- || TREE_CODE (op) == FILTER_EXPR)
- continue;
-
if (TREE_CODE (op) == WITH_SIZE_EXPR)
op = TREE_OPERAND (op, 0);
if (TREE_CODE (op) != SSA_NAME
- && !is_gimple_min_invariant (op)
- && refs_may_alias_p (op, ref))
- return true;
+ && !is_gimple_min_invariant (op))
+ {
+ ao_ref r;
+ ao_ref_init (&r, op);
+ if (refs_may_alias_p_1 (&r, ref, true))
+ return true;
+ }
}
return false;
static bool
ref_maybe_used_by_call_p (gimple call, tree ref)
{
- bool res = ref_maybe_used_by_call_p_1 (call, ref);
+ ao_ref r;
+ bool res;
+ ao_ref_init (&r, ref);
+ res = ref_maybe_used_by_call_p_1 (call, &r);
if (res)
++alias_stats.ref_maybe_used_by_call_p_may_alias;
else
case BUILT_IN_STPNCPY:
case BUILT_IN_STRCAT:
case BUILT_IN_STRNCAT:
+ case BUILT_IN_MEMSET:
{
- tree dest = gimple_call_arg (call, 0);
- return ptr_deref_may_alias_ref_p_1 (dest, ref);
+ ao_ref dref;
+ tree size = NULL_TREE;
+ if (gimple_call_num_args (call) == 3)
+ size = gimple_call_arg (call, 2);
+ ao_ref_init_from_ptr_and_size (&dref,
+ gimple_call_arg (call, 0),
+ size);
+ return refs_may_alias_p_1 (&dref, ref, false);
}
/* Freeing memory kills the pointed-to memory. More importantly
the call has to serve as a barrier for moving loads and stores
- across it. Same is true for memset. */
+ across it. */
case BUILT_IN_FREE:
- case BUILT_IN_MEMSET:
{
tree ptr = gimple_call_arg (call, 0);
return ptr_deref_may_alias_ref_p_1 (ptr, ref);
}
- case BUILT_IN_FREXP:
- case BUILT_IN_FREXPF:
- case BUILT_IN_FREXPL:
case BUILT_IN_GAMMA_R:
case BUILT_IN_GAMMAF_R:
case BUILT_IN_GAMMAL_R:
case BUILT_IN_LGAMMA_R:
case BUILT_IN_LGAMMAF_R:
case BUILT_IN_LGAMMAL_R:
+ {
+ tree out = gimple_call_arg (call, 1);
+ if (ptr_deref_may_alias_ref_p_1 (out, ref))
+ return true;
+ if (flag_errno_math)
+ break;
+ return false;
+ }
+ case BUILT_IN_FREXP:
+ case BUILT_IN_FREXPF:
+ case BUILT_IN_FREXPL:
case BUILT_IN_MODF:
case BUILT_IN_MODFF:
case BUILT_IN_MODFL:
case BUILT_IN_REMQUOL:
{
tree out = gimple_call_arg (call, 2);
- return ptr_deref_may_alias_ref_p_1 (out, ref);
+ if (ptr_deref_may_alias_ref_p_1 (out, ref))
+ return true;
+ if (flag_errno_math)
+ break;
+ return false;
}
case BUILT_IN_SINCOS:
case BUILT_IN_SINCOSF:
if (DECL_P (base))
return is_call_clobbered (base);
+ else if (INDIRECT_REF_P (base)
+ && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
+ {
+ struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
+ if (!pi)
+ return true;
+
+ return (pt_solution_includes_global (&pi->pt)
+ || pt_solutions_intersect (&cfun->gimple_df->escaped, &pi->pt));
+ }
return true;
}
The function returns the number of statements walked. */
static unsigned int
-walk_aliased_vdefs_1 (tree ref, tree vdef,
- bool (*walker)(tree, tree, void *), void *data,
+walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
+ bool (*walker)(ao_ref *, tree, void *), void *data,
bitmap *visited, unsigned int cnt)
{
do
/* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
cnt++;
if ((!ref
- || stmt_may_clobber_ref_p (def_stmt, ref))
+ || stmt_may_clobber_ref_p_1 (def_stmt, ref))
&& (*walker) (ref, vdef, data))
return cnt;
}
unsigned int
-walk_aliased_vdefs (tree ref, tree vdef,
- bool (*walker)(tree, tree, void *), void *data,
+walk_aliased_vdefs (ao_ref *ref, tree vdef,
+ bool (*walker)(ao_ref *, tree, void *), void *data,
bitmap *visited)
{
bitmap local_visited = NULL;