if (!pi)
return true;
- /* If the decl can be used as a restrict tag and we have a restrict
- pointer and that pointers points-to set doesn't contain this decl
- then they can't alias. */
- if (DECL_RESTRICTED_P (decl)
- && pi->pt.vars_contains_restrict)
- return bitmap_bit_p (pi->pt.vars, DECL_PT_UID (decl));
-
return pt_solution_includes (&pi->pt, decl);
}
if (!pi1 || !pi2)
return true;
- /* If both pointers are restrict-qualified try to disambiguate
- with restrict information. */
- if (!pt_solutions_same_restrict_base (&pi1->pt, &pi2->pt))
- return false;
-
/* ??? This does not use TBAA to prune decls from the intersection
that not both pointers may access. */
return pt_solutions_intersect (&pi1->pt, &pi2->pt);
dump_decl_set (file, pt->vars);
if (pt->vars_contains_global)
fprintf (file, " (includes global vars)");
- if (pt->vars_contains_restrict)
- fprintf (file, " (includes restrict tags)");
}
}
r->max_size = -1;
r->ref_alias_set = -1;
r->base_alias_set = -1;
+ r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
}
/* Returns the base object of the memory reference *REF. */
ref->max_size = ref->size = -1;
ref->ref_alias_set = 0;
ref->base_alias_set = 0;
+ ref->volatile_p = false;
}
/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
|| TREE_CODE (base2) == LABEL_DECL)
return true;
+ /* Two volatile accesses always conflict. */
+ if (ref1->volatile_p
+ && ref2->volatile_p)
+ return true;
+
/* Defer to simple offset based disambiguation if we have
references based on two decls. Do this before defering to
TBAA to handle must-alias cases in conformance with the
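As a standalone illustration (not part of the patch): the new volatile_p flag makes any two volatile references conflict before the offset-based and TBAA-based disambiguators get a chance to prune them. A minimal sketch of that ordering, using a made-up mem_ref struct in place of ao_ref and a plain byte-range overlap test in place of the real decl/offset machinery:

#include <stdbool.h>
#include <stdio.h>

/* Hypothetical stand-in for ao_ref: a byte range plus the new
   volatile flag.  */
struct mem_ref
{
  long offset;
  long size;
  bool volatile_p;
};

/* Conservative conflict test in the same order as above: two volatile
   accesses always conflict; otherwise fall back to range overlap.  */
static bool
refs_conflict_p (const struct mem_ref *r1, const struct mem_ref *r2)
{
  if (r1->volatile_p && r2->volatile_p)
    return true;
  return (r1->offset < r2->offset + r2->size
	  && r2->offset < r1->offset + r1->size);
}

int
main (void)
{
  struct mem_ref a = { 0, 4, true };
  struct mem_ref b = { 16, 4, true };   /* Disjoint but both volatile.  */
  struct mem_ref c = { 16, 4, false };  /* Disjoint and not volatile.  */
  printf ("%d %d\n", refs_conflict_p (&a, &b), refs_conflict_p (&a, &c));
  /* Prints "1 0".  */
  return 0;
}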
if (!base)
return true;
+ /* A call that is not without side-effects might involve volatile
+ accesses and thus conflicts with all other volatile accesses. */
+ if (ref->volatile_p)
+ return true;
+
/* If the reference is based on a decl that is not aliased the call
cannot possibly use it. */
if (DECL_P (base)
case BUILT_IN_MEMPCPY:
case BUILT_IN_STPCPY:
case BUILT_IN_STPNCPY:
+ case BUILT_IN_TM_MEMCPY:
+ case BUILT_IN_TM_MEMMOVE:
{
ao_ref dref;
tree size = NULL_TREE;
case BUILT_IN_MEMMOVE_CHK:
case BUILT_IN_MEMPCPY_CHK:
case BUILT_IN_STPCPY_CHK:
+ case BUILT_IN_STPNCPY_CHK:
{
ao_ref dref;
tree size = NULL_TREE;
size);
return refs_may_alias_p_1 (&dref, ref, false);
}
+
+ /* The following functions read memory pointed to by their
+ first argument. */
+ CASE_BUILT_IN_TM_LOAD (1):
+ CASE_BUILT_IN_TM_LOAD (2):
+ CASE_BUILT_IN_TM_LOAD (4):
+ CASE_BUILT_IN_TM_LOAD (8):
+ CASE_BUILT_IN_TM_LOAD (FLOAT):
+ CASE_BUILT_IN_TM_LOAD (DOUBLE):
+ CASE_BUILT_IN_TM_LOAD (LDOUBLE):
+ CASE_BUILT_IN_TM_LOAD (M64):
+ CASE_BUILT_IN_TM_LOAD (M128):
+ CASE_BUILT_IN_TM_LOAD (M256):
+ case BUILT_IN_TM_LOG:
+ case BUILT_IN_TM_LOG_1:
+ case BUILT_IN_TM_LOG_2:
+ case BUILT_IN_TM_LOG_4:
+ case BUILT_IN_TM_LOG_8:
+ case BUILT_IN_TM_LOG_FLOAT:
+ case BUILT_IN_TM_LOG_DOUBLE:
+ case BUILT_IN_TM_LOG_LDOUBLE:
+ case BUILT_IN_TM_LOG_M64:
+ case BUILT_IN_TM_LOG_M128:
+ case BUILT_IN_TM_LOG_M256:
+ return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);
+
/* These read memory pointed to by the first argument. */
case BUILT_IN_STRDUP:
case BUILT_IN_STRNDUP:
case BUILT_IN_STACK_SAVE:
case BUILT_IN_STACK_RESTORE:
case BUILT_IN_MEMSET:
+ case BUILT_IN_TM_MEMSET:
case BUILT_IN_MEMSET_CHK:
case BUILT_IN_FREXP:
case BUILT_IN_FREXPF:
|| CONSTANT_CLASS_P (base))
return false;
+ /* A call that is not without side-effects might involve volatile
+ accesses and thus conflicts with all other volatile accesses. */
+ if (ref->volatile_p)
+ return true;
+
/* If the reference is based on a decl that is not aliased the call
cannot possibly clobber it. */
if (DECL_P (base)
case BUILT_IN_STRCAT:
case BUILT_IN_STRNCAT:
case BUILT_IN_MEMSET:
+ case BUILT_IN_TM_MEMSET:
+ CASE_BUILT_IN_TM_STORE (1):
+ CASE_BUILT_IN_TM_STORE (2):
+ CASE_BUILT_IN_TM_STORE (4):
+ CASE_BUILT_IN_TM_STORE (8):
+ CASE_BUILT_IN_TM_STORE (FLOAT):
+ CASE_BUILT_IN_TM_STORE (DOUBLE):
+ CASE_BUILT_IN_TM_STORE (LDOUBLE):
+ CASE_BUILT_IN_TM_STORE (M64):
+ CASE_BUILT_IN_TM_STORE (M128):
+ CASE_BUILT_IN_TM_STORE (M256):
+ case BUILT_IN_TM_MEMCPY:
+ case BUILT_IN_TM_MEMMOVE:
{
ao_ref dref;
tree size = NULL_TREE;
case BUILT_IN_MEMMOVE_CHK:
case BUILT_IN_MEMPCPY_CHK:
case BUILT_IN_STPCPY_CHK:
+ case BUILT_IN_STPNCPY_CHK:
case BUILT_IN_STRCAT_CHK:
case BUILT_IN_STRNCAT_CHK:
case BUILT_IN_MEMSET_CHK:
maybe_skip_until (gimple phi, tree target, ao_ref *ref,
tree vuse, bitmap *visited)
{
+ basic_block bb = gimple_bb (phi);
+
if (!*visited)
*visited = BITMAP_ALLOC (NULL);
else if (gimple_nop_p (def_stmt)
|| stmt_may_clobber_ref_p_1 (def_stmt, ref))
return false;
+ /* If we reach a new basic-block see if we already skipped it
+ in a previous walk that ended successfully. */
+ if (gimple_bb (def_stmt) != bb)
+ {
+ if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
+ return true;
+ bb = gimple_bb (def_stmt);
+ }
vuse = gimple_vuse (def_stmt);
}
return true;
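A standalone sketch (not part of the patch) of the early-out added to maybe_skip_until: bitmap_set_bit reports whether the bit was newly set, so finding it already set means an earlier successful walk covered this part of the chain and the current walk can stop. The types, names, and the per-step (rather than per-basic-block) recording below are simplifications invented for the example:

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

#define MAX_VERSION 64

/* Hypothetical stand-in for one link of a virtual use-def chain.  */
struct vdef
{
  unsigned version;    /* Analogue of SSA_NAME_VERSION (vuse).  */
  struct vdef *prev;   /* Analogue of following gimple_vuse ().  */
};

/* Record VERSION; return true only if it was not recorded before,
   mimicking how bitmap_set_bit reports whether the bit changed.  */
static bool
record_visited (bool *visited, unsigned version)
{
  if (visited[version])
    return false;
  visited[version] = true;
  return true;
}

/* Walk from START towards TARGET; succeed when TARGET is reached or
   when we hit a link an earlier walk already recorded.  */
static bool
skip_until (struct vdef *start, struct vdef *target, bool *visited)
{
  for (struct vdef *d = start; d != target; d = d->prev)
    {
      if (!d)
	return false;		/* Ran off the end of the chain.  */
      if (!record_visited (visited, d->version))
	return true;		/* Already skipped by a previous walk.  */
    }
  return true;
}

int
main (void)
{
  bool visited[MAX_VERSION] = { false };
  struct vdef d1 = { 1, NULL }, d2 = { 2, &d1 }, d3 = { 3, &d2 };
  printf ("%d\n", skip_until (&d3, &d1, visited));   /* 1: reached target.  */
  printf ("%d\n", skip_until (&d3, NULL, visited));  /* 1: d3 seen already.  */
  return 0;
}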
until we hit the phi argument definition that dominates the other one. */
else if (nargs >= 2)
{
- tree arg0 = PHI_ARG_DEF (phi, 0);
- tree arg1;
- unsigned i = 1;
- do
+ tree arg0, arg1;
+ unsigned i;
+
+      /* Find a candidate for the virtual operand whose definition
+ dominates those of all others. */
+ arg0 = PHI_ARG_DEF (phi, 0);
+ if (!SSA_NAME_IS_DEFAULT_DEF (arg0))
+ for (i = 1; i < nargs; ++i)
+ {
+ arg1 = PHI_ARG_DEF (phi, i);
+ if (SSA_NAME_IS_DEFAULT_DEF (arg1))
+ {
+ arg0 = arg1;
+ break;
+ }
+ if (dominated_by_p (CDI_DOMINATORS,
+ gimple_bb (SSA_NAME_DEF_STMT (arg0)),
+ gimple_bb (SSA_NAME_DEF_STMT (arg1))))
+ arg0 = arg1;
+ }
+
+ /* Then pairwise reduce against the found candidate. */
+ for (i = 0; i < nargs; ++i)
{
arg1 = PHI_ARG_DEF (phi, i);
arg0 = get_continuation_for_phi_1 (phi, arg0, arg1, ref, visited);
if (!arg0)
return NULL_TREE;
-
}
- while (++i < nargs);
return arg0;
}
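A standalone sketch (not part of the patch) of the two-phase PHI handling introduced above: first pick the argument whose definition dominates the definitions of all the others (a default definition wins immediately), then every argument is reduced pairwise against that single candidate instead of folding left to right. Dominance is modelled here as a bare depth number on a single dominator-tree path, a simplification invented for the example:

#include <stdbool.h>
#include <stdio.h>

/* Hypothetical stand-in for a PHI argument's defining statement.  */
struct phi_arg
{
  int depth;            /* Position on one dominator-tree path.  */
  bool is_default_def;  /* Analogue of SSA_NAME_IS_DEFAULT_DEF.  */
};

/* Return true if B's definition dominates A's definition.  */
static bool
def_dominated_by_p (const struct phi_arg *a, const struct phi_arg *b)
{
  return b->depth <= a->depth;
}

/* Phase one: choose the argument whose definition dominates all others.  */
static const struct phi_arg *
find_candidate (const struct phi_arg *args, unsigned nargs)
{
  const struct phi_arg *cand = &args[0];
  if (cand->is_default_def)
    return cand;
  for (unsigned i = 1; i < nargs; ++i)
    {
      if (args[i].is_default_def)
	return &args[i];
      if (def_dominated_by_p (cand, &args[i]))
	cand = &args[i];
    }
  return cand;
}

int
main (void)
{
  struct phi_arg args[3] = { { 5, false }, { 2, false }, { 7, false } };
  const struct phi_arg *cand = find_candidate (args, 3);
  /* Phase two in the patch then tests every argument against CAND;
     here we only report which argument was chosen.  */
  printf ("candidate depth: %d\n", cand->depth);   /* Prints 2.  */
  return 0;
}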