/* Alias analysis for trees.
- Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009
+ Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
Free Software Foundation, Inc.
Contributed by Diego Novillo <dnovillo@redhat.com>
ptr = TREE_OPERAND (base, 0);
else if (base
&& SSA_VAR_P (base))
- return operand_equal_p (base, decl, 0);
+ return base == decl;
else if (base
&& CONSTANT_CLASS_P (base))
return false;
if (!pi)
return true;
+ /* If the decl can be used as a restrict tag and we have a restrict
+ pointer and that pointer's points-to set doesn't contain this decl
+ then they can't alias. */
+ if (DECL_RESTRICTED_P (decl)
+ && TYPE_RESTRICT (TREE_TYPE (ptr))
+ && pi->pt.vars_contains_restrict)
+ return bitmap_bit_p (pi->pt.vars, DECL_PT_UID (decl));
+
return pt_solution_includes (&pi->pt, decl);
}
return true;
}
-static bool
-ptr_deref_may_alias_ref_p (tree ptr, tree ref)
-{
- ao_ref r;
- ao_ref_init (&r, ref);
- return ptr_deref_may_alias_ref_p_1 (ptr, &r);
-}
-
/* Dump alias information on FILE. */
fprintf (file, "\n\nAlias information for %s\n\n", funcname);
fprintf (file, "Aliased symbols\n\n");
-
+
FOR_EACH_REFERENCED_VAR (var, rvi)
{
if (may_be_aliased (var))
fprintf (file, "\nESCAPED");
dump_points_to_solution (file, &cfun->gimple_df->escaped);
- fprintf (file, "\nCALLUSED");
- dump_points_to_solution (file, &cfun->gimple_df->callused);
fprintf (file, "\n\nFlow-insensitive points-to information\n\n");
{
tree ptr = ssa_name (i);
struct ptr_info_def *pi;
-
+
if (ptr == NULL_TREE
|| SSA_NAME_IN_FREE_LIST (ptr))
continue;
if (pt->escaped)
fprintf (file, ", points-to escaped");
+ if (pt->ipa_escaped)
+ fprintf (file, ", points-to unit escaped");
+
if (pt->null)
fprintf (file, ", points-to NULL");
dump_decl_set (file, pt->vars);
if (pt->vars_contains_global)
fprintf (file, " (includes global vars)");
+ if (pt->vars_contains_restrict)
+ fprintf (file, " (includes restrict tags)");
}
}
return ref->ref_alias_set;
}
+/* Init an alias-oracle reference representation from a gimple pointer
+   PTR and a gimple size SIZE in bytes. If SIZE is NULL_TREE then the
+   size is assumed to be unknown. The access is assumed to be only
+   to or after the pointer target, not before it. */
+
+void
+ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
+{
+  HOST_WIDE_INT t1, t2;
+  ref->ref = NULL_TREE;
+  if (TREE_CODE (ptr) == ADDR_EXPR)
+    ref->base = get_ref_base_and_extent (TREE_OPERAND (ptr, 0),
+					 &ref->offset, &t1, &t2);
+  else
+    {
+      /* Not a decl address: model the access as an indirect ref off PTR.  */
+      ref->base = build1 (INDIRECT_REF, char_type_node, ptr);
+      ref->offset = 0;
+    }
+  if (size
+      && host_integerp (size, 0)
+      /* Guard the byte-to-bit conversion: accept SIZE only if
+	 multiplying by 8 does not overflow.  */
+      && TREE_INT_CST_LOW (size) * 8 / 8 == TREE_INT_CST_LOW (size))
+    ref->max_size = ref->size = TREE_INT_CST_LOW (size) * 8;
+  else
+    /* Unknown (or too large) size: record an unbounded access.  */
+    ref->max_size = ref->size = -1;
+  ref->ref_alias_set = 0;
+  ref->base_alias_set = 0;
+}
+
/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
purpose of TBAA. Return 0 if they are distinct and -1 if we cannot
decide. */
if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
return 1;
- /* ??? Array types are not properly unified in all cases as we have
+ /* ??? Array types are not properly unified in all cases as we have
spurious changes in the index types for example. Removing this
causes all sorts of problems with the Fortran frontend. */
if (TREE_CODE (type1) == ARRAY_TYPE
&& TREE_CODE (type2) == ARRAY_TYPE)
return -1;
+ /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
+ object of one of its constrained subtypes, e.g. when a function with an
+ unconstrained parameter passed by reference is called on an object and
+ inlined. But, even in the case of a fixed size, type and subtypes are
+ not equivalent enough as to share the same TYPE_CANONICAL, since this
+ would mean that conversions between them are useless, whereas they are
+ not (e.g. type and subtypes can have different modes). So, in the end,
+ they are only guaranteed to have the same alias set. */
+ if (get_alias_set (type1) == get_alias_set (type2))
+ return -1;
+
/* The types are known to be not equal. */
return 0;
}
on an indirect reference may alias. */
static bool
-nonaliasing_component_refs_p (tree ref1, tree type1,
- HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
- tree ref2, tree type2,
- HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2)
+aliasing_component_refs_p (tree ref1, tree type1,
+ HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
+ tree ref2, tree type2,
+ HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2)
{
/* If one reference is a component references through pointers try to find a
common base and apply offset based disambiguation. This handles
}
/* Return true if two memory references based on the variables BASE1
- and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1[ and
- [OFFSET2, OFFSET2 + MAX_SIZE2[ may alias. */
+ and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
+ [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. */
static bool
decl_refs_may_alias_p (tree base1,
gcc_assert (SSA_VAR_P (base1) && SSA_VAR_P (base2));
/* If both references are based on different variables, they cannot alias. */
- if (!operand_equal_p (base1, base2, 0))
+ if (base1 != base2)
return false;
/* If both references are based on the same variable, they cannot alias if
}
/* Return true if an indirect reference based on *PTR1 constrained
- to [OFFSET1, OFFSET1 + MAX_SIZE1[ may alias a variable based on BASE2
- constrained to [OFFSET2, OFFSET2 + MAX_SIZE2[. *PTR1 and BASE2 have
+ to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
+ constrained to [OFFSET2, OFFSET2 + MAX_SIZE2). *PTR1 and BASE2 have
the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
in which case they are computed on-demand. REF1 and REF2
if non-NULL are the complete memory reference trees. */
if (ref1 && ref2
&& handled_component_p (ref1)
&& handled_component_p (ref2))
- return nonaliasing_component_refs_p (ref1, TREE_TYPE (TREE_TYPE (ptr1)),
- offset1, max_size1,
- ref2, TREE_TYPE (base2),
- offset2, max_size2);
+ return aliasing_component_refs_p (ref1, TREE_TYPE (TREE_TYPE (ptr1)),
+ offset1, max_size1,
+ ref2, TREE_TYPE (base2),
+ offset2, max_size2);
return true;
}
/* Return true if two indirect references based on *PTR1
- and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1[ and
- [OFFSET2, OFFSET2 + MAX_SIZE2[ may alias. *PTR1 and *PTR2 have
+ and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
+ [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. *PTR1 and *PTR2 have
the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
in which case they are computed on-demand. REF1 and REF2
if non-NULL are the complete memory reference trees. */
if (ref1 && ref2
&& handled_component_p (ref1)
&& handled_component_p (ref2))
- return nonaliasing_component_refs_p (ref1, TREE_TYPE (TREE_TYPE (ptr1)),
- offset1, max_size1,
- ref2, TREE_TYPE (TREE_TYPE (ptr2)),
- offset2, max_size2);
+ return aliasing_component_refs_p (ref1, TREE_TYPE (TREE_TYPE (ptr1)),
+ offset1, max_size1,
+ ref2, TREE_TYPE (TREE_TYPE (ptr2)),
+ offset2, max_size2);
return true;
}
{
tree base1, base2;
HOST_WIDE_INT offset1 = 0, offset2 = 0;
- HOST_WIDE_INT size1 = -1, size2 = -1;
HOST_WIDE_INT max_size1 = -1, max_size2 = -1;
bool var1_p, var2_p, ind1_p, ind2_p;
alias_set_type set;
|| SSA_VAR_P (ref1->ref)
|| handled_component_p (ref1->ref)
|| INDIRECT_REF_P (ref1->ref)
- || TREE_CODE (ref1->ref) == TARGET_MEM_REF)
+ || TREE_CODE (ref1->ref) == TARGET_MEM_REF
+ || TREE_CODE (ref1->ref) == CONST_DECL)
&& (!ref2->ref
|| SSA_VAR_P (ref2->ref)
|| handled_component_p (ref2->ref)
|| INDIRECT_REF_P (ref2->ref)
- || TREE_CODE (ref2->ref) == TARGET_MEM_REF));
+ || TREE_CODE (ref2->ref) == TARGET_MEM_REF
+ || TREE_CODE (ref2->ref) == CONST_DECL));
/* Decompose the references into their base objects and the access. */
base1 = ao_ref_base (ref1);
offset1 = ref1->offset;
- size1 = ref1->size;
max_size1 = ref1->max_size;
base2 = ao_ref_base (ref2);
offset2 = ref2->offset;
- size2 = ref2->size;
max_size2 = ref2->max_size;
/* We can end up with registers or constants as bases for example from
which is seen as a struct copy. */
if (TREE_CODE (base1) == SSA_NAME
|| TREE_CODE (base2) == SSA_NAME
+ || TREE_CODE (base1) == CONST_DECL
+ || TREE_CODE (base2) == CONST_DECL
|| is_gimple_min_invariant (base1)
|| is_gimple_min_invariant (base2))
return false;
+ /* We can end up refering to code via function decls. As we likely
+ do not properly track code aliases conservatively bail out. */
+ if (TREE_CODE (base1) == FUNCTION_DECL
+ || TREE_CODE (base2) == FUNCTION_DECL)
+ return true;
+
/* Defer to simple offset based disambiguation if we have
references based on two decls. Do this before defering to
TBAA to handle must-alias cases in conformance with the
return decl_refs_may_alias_p (base1, offset1, max_size1,
base2, offset2, max_size2);
+ ind1_p = INDIRECT_REF_P (base1);
+ ind2_p = INDIRECT_REF_P (base2);
+ /* Canonicalize the pointer-vs-decl case. */
+ if (ind1_p && var2_p)
+ {
+ HOST_WIDE_INT tmp1;
+ tree tmp2;
+ ao_ref *tmp3;
+ tmp1 = offset1; offset1 = offset2; offset2 = tmp1;
+ tmp1 = max_size1; max_size1 = max_size2; max_size2 = tmp1;
+ tmp2 = base1; base1 = base2; base2 = tmp2;
+ tmp3 = ref1; ref1 = ref2; ref2 = tmp3;
+ var1_p = true;
+ ind1_p = false;
+ var2_p = false;
+ ind2_p = true;
+ }
+
+ /* If we are about to disambiguate pointer-vs-decl try harder to
+ see must-aliases and give leeway to some invalid cases.
+ This covers a pretty minimal set of cases only and does not apply
+ when called from the RTL oracle. It handles cases like
+
+ int i = 1;
+ return *(float *)&i;
+
+ and also fixes gfortran.dg/lto/pr40725. */
+ if (var1_p && ind2_p
+ && cfun
+ && gimple_in_ssa_p (cfun)
+ && TREE_CODE (TREE_OPERAND (base2, 0)) == SSA_NAME)
+ {
+ gimple def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (base2, 0));
+ while (is_gimple_assign (def_stmt)
+ && (gimple_assign_rhs_code (def_stmt) == SSA_NAME
+ || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))))
+ {
+ tree rhs = gimple_assign_rhs1 (def_stmt);
+ HOST_WIDE_INT offset, size, max_size;
+
+ /* Look through SSA name copies and pointer conversions. */
+ if (TREE_CODE (rhs) == SSA_NAME
+ && POINTER_TYPE_P (TREE_TYPE (rhs)))
+ {
+ def_stmt = SSA_NAME_DEF_STMT (rhs);
+ continue;
+ }
+ if (TREE_CODE (rhs) != ADDR_EXPR)
+ break;
+
+ /* If the pointer is defined as an address based on a decl
+ use plain offset disambiguation and ignore TBAA. */
+ rhs = TREE_OPERAND (rhs, 0);
+ rhs = get_ref_base_and_extent (rhs, &offset, &size, &max_size);
+ if (SSA_VAR_P (rhs))
+ {
+ base2 = rhs;
+ offset2 += offset;
+ if (size != max_size
+ || max_size == -1)
+ max_size2 = -1;
+ return decl_refs_may_alias_p (base1, offset1, max_size1,
+ base2, offset2, max_size2);
+ }
+
+ /* Do not continue looking through &p->x to limit time
+ complexity. */
+ break;
+ }
+ }
+
/* First defer to TBAA if possible. */
if (tbaa_p
&& flag_strict_aliasing
return true;
/* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */
- ind1_p = INDIRECT_REF_P (base1);
- ind2_p = INDIRECT_REF_P (base2);
set = tbaa_p ? -1 : 0;
if (var1_p && ind2_p)
return indirect_ref_may_alias_decl_p (ref2->ref, TREE_OPERAND (base2, 0),
offset2, max_size2, set,
ref1->ref, base1,
offset1, max_size1, set);
- else if (ind1_p && var2_p)
- return indirect_ref_may_alias_decl_p (ref1->ref, TREE_OPERAND (base1, 0),
- offset1, max_size1, set,
- ref2->ref, base2,
- offset2, max_size2, set);
else if (ind1_p && ind2_p)
return indirect_refs_may_alias_p (ref1->ref, TREE_OPERAND (base1, 0),
offset1, max_size1, set,
otherwise return false. */
static bool
-ref_maybe_used_by_call_p_1 (gimple call, tree ref)
+ref_maybe_used_by_call_p_1 (gimple call, ao_ref *ref)
{
tree base, callee;
unsigned i;
&& (flags & (ECF_CONST|ECF_NOVOPS)))
goto process_args;
- base = get_base_address (ref);
+ base = ao_ref_base (ref);
if (!base)
return true;
their first argument. */
case BUILT_IN_STRCPY:
case BUILT_IN_STRNCPY:
- case BUILT_IN_BCOPY:
case BUILT_IN_MEMCPY:
case BUILT_IN_MEMMOVE:
case BUILT_IN_MEMPCPY:
case BUILT_IN_STRCAT:
case BUILT_IN_STRNCAT:
{
- tree src = gimple_call_arg (call, 1);
- return ptr_deref_may_alias_ref_p (src, ref);
+ ao_ref dref;
+ tree size = NULL_TREE;
+ if (gimple_call_num_args (call) == 3)
+ size = gimple_call_arg (call, 2);
+ ao_ref_init_from_ptr_and_size (&dref,
+ gimple_call_arg (call, 1),
+ size);
+ return refs_may_alias_p_1 (&dref, ref, false);
+ }
+ case BUILT_IN_BCOPY:
+ {
+ ao_ref dref;
+ tree size = gimple_call_arg (call, 2);
+ ao_ref_init_from_ptr_and_size (&dref,
+ gimple_call_arg (call, 0),
+ size);
+ return refs_may_alias_p_1 (&dref, ref, false);
}
/* The following builtins do not read from memory. */
case BUILT_IN_FREE:
+ case BUILT_IN_MALLOC:
+ case BUILT_IN_CALLOC:
case BUILT_IN_MEMSET:
case BUILT_IN_FREXP:
case BUILT_IN_FREXPF:
goto process_args;
}
- /* If the base variable is call-used or call-clobbered then
- it may be used. */
- if (flags & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
+ /* Check if the base variable is call-used. */
+ if (DECL_P (base))
{
- if (DECL_P (base))
- {
- if (is_call_used (base))
- return true;
- }
- else if (INDIRECT_REF_P (base)
- && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
- {
- struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
- if (!pi)
- return true;
-
- if (pt_solution_includes_global (&pi->pt)
- || pt_solutions_intersect (&cfun->gimple_df->callused, &pi->pt)
- || pt_solutions_intersect (&cfun->gimple_df->escaped, &pi->pt))
- return true;
- }
- else
+ if (pt_solution_includes (gimple_call_use_set (call), base))
return true;
}
- else
+ else if (INDIRECT_REF_P (base)
+ && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
{
- if (DECL_P (base))
- {
- if (is_call_clobbered (base))
- return true;
- }
- else if (INDIRECT_REF_P (base)
- && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
- {
- struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
- if (!pi)
- return true;
+ struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
+ if (!pi)
+ return true;
- if (pt_solution_includes_global (&pi->pt)
- || pt_solutions_intersect (&cfun->gimple_df->escaped, &pi->pt))
- return true;
- }
- else
+ if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
return true;
}
+ else
+ return true;
/* Inspect call arguments for passed-by-value aliases. */
process_args:
{
tree op = gimple_call_arg (call, i);
- if (TREE_CODE (op) == EXC_PTR_EXPR
- || TREE_CODE (op) == FILTER_EXPR)
- continue;
-
if (TREE_CODE (op) == WITH_SIZE_EXPR)
op = TREE_OPERAND (op, 0);
if (TREE_CODE (op) != SSA_NAME
- && !is_gimple_min_invariant (op)
- && refs_may_alias_p (op, ref))
- return true;
+ && !is_gimple_min_invariant (op))
+ {
+ ao_ref r;
+ ao_ref_init (&r, op);
+ if (refs_may_alias_p_1 (&r, ref, true))
+ return true;
+ }
}
return false;
static bool
ref_maybe_used_by_call_p (gimple call, tree ref)
{
- bool res = ref_maybe_used_by_call_p_1 (call, ref);
+ ao_ref r;
+ bool res;
+ ao_ref_init (&r, ref);
+ res = ref_maybe_used_by_call_p_1 (call, &r);
if (res)
++alias_stats.ref_maybe_used_by_call_p_may_alias;
else
their first argument. */
case BUILT_IN_STRCPY:
case BUILT_IN_STRNCPY:
- case BUILT_IN_BCOPY:
case BUILT_IN_MEMCPY:
case BUILT_IN_MEMMOVE:
case BUILT_IN_MEMPCPY:
case BUILT_IN_STPNCPY:
case BUILT_IN_STRCAT:
case BUILT_IN_STRNCAT:
+ case BUILT_IN_MEMSET:
+ {
+ ao_ref dref;
+ tree size = NULL_TREE;
+ if (gimple_call_num_args (call) == 3)
+ size = gimple_call_arg (call, 2);
+ ao_ref_init_from_ptr_and_size (&dref,
+ gimple_call_arg (call, 0),
+ size);
+ return refs_may_alias_p_1 (&dref, ref, false);
+ }
+ case BUILT_IN_BCOPY:
{
- tree dest = gimple_call_arg (call, 0);
- return ptr_deref_may_alias_ref_p_1 (dest, ref);
+ ao_ref dref;
+ tree size = gimple_call_arg (call, 2);
+ ao_ref_init_from_ptr_and_size (&dref,
+ gimple_call_arg (call, 1),
+ size);
+ return refs_may_alias_p_1 (&dref, ref, false);
}
+ /* Allocating memory does not have any side-effects apart from
+ being the definition point for the pointer. */
+ case BUILT_IN_MALLOC:
+ case BUILT_IN_CALLOC:
+ /* Unix98 specifies that errno is set on allocation failure.
+ Until we properly can track the errno location assume it
+ is not a local decl but external or anonymous storage in
+ a different translation unit. Also assume it is of
+ type int as required by the standard. */
+ if (flag_errno_math
+ && TREE_TYPE (base) == integer_type_node)
+ {
+ struct ptr_info_def *pi;
+ if (DECL_P (base)
+ && !TREE_STATIC (base))
+ return true;
+ else if (INDIRECT_REF_P (base)
+ && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
+ && (pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0))))
+ return pi->pt.anything || pi->pt.nonlocal;
+ }
+ return false;
/* Freeing memory kills the pointed-to memory. More importantly
the call has to serve as a barrier for moving loads and stores
- across it. Same is true for memset. */
+ across it. */
case BUILT_IN_FREE:
- case BUILT_IN_MEMSET:
{
tree ptr = gimple_call_arg (call, 0);
return ptr_deref_may_alias_ref_p_1 (ptr, ref);
return false;
}
+ /* Check if the base variable is call-clobbered. */
if (DECL_P (base))
- return is_call_clobbered (base);
+ return pt_solution_includes (gimple_call_clobber_set (call), base);
else if (INDIRECT_REF_P (base)
&& TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
{
if (!pi)
return true;
- return (pt_solution_includes_global (&pi->pt)
- || pt_solutions_intersect (&cfun->gimple_df->escaped, &pi->pt));
+ return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
}
return true;
}
-static bool ATTRIBUTE_UNUSED
+/* If the call in statement CALL may clobber the memory reference REF
+ return true, otherwise return false. */
+
+bool
call_may_clobber_ref_p (gimple call, tree ref)
{
bool res;
}
-static tree get_continuation_for_phi (gimple, ao_ref *, bitmap *);
-
/* Walk the virtual use-def chain of VUSE until hitting the virtual operand
TARGET or a statement clobbering the memory reference REF in which
case false is returned. The walk starts with VUSE, one argument of PHI. */
clobber REF. Returns NULL_TREE if no suitable virtual operand can
be found. */
-static tree
+tree
get_continuation_for_phi (gimple phi, ao_ref *ref, bitmap *visited)
{
unsigned nargs = gimple_phi_num_args (phi);
tree arg1 = PHI_ARG_DEF (phi, 1);
gimple def0 = SSA_NAME_DEF_STMT (arg0);
gimple def1 = SSA_NAME_DEF_STMT (arg1);
+ tree common_vuse;
if (arg0 == arg1)
return arg0;
if (maybe_skip_until (phi, arg1, ref, arg0, visited))
return arg1;
}
+ /* Special case of a diamond:
+ MEM_1 = ...
+ goto (cond) ? L1 : L2
+ L1: store1 = ... #MEM_2 = vuse(MEM_1)
+ goto L3
+ L2: store2 = ... #MEM_3 = vuse(MEM_1)
+ L3: MEM_4 = PHI<MEM_2, MEM_3>
+ We were called with the PHI at L3, MEM_2 and MEM_3 don't
+ dominate each other, but still we can easily skip this PHI node
+ if we recognize that the vuse MEM operand is the same for both,
+ and that we can skip both statements (they don't clobber us).
+ This is still linear. Don't use maybe_skip_until, that might
+ potentially be slow. */
+ else if ((common_vuse = gimple_vuse (def0))
+ && common_vuse == gimple_vuse (def1))
+ {
+ if (!stmt_may_clobber_ref_p_1 (def0, ref)
+ && !stmt_may_clobber_ref_p_1 (def1, ref))
+ return common_vuse;
+ }
}
return NULL_TREE;