X-Git-Url: http://git.sourceforge.jp/view?a=blobdiff_plain;f=gcc%2Ftree-ssa-alias.c;h=1b3981564d1e0ceaea0413d3664d18ab8e69e140;hb=d4107d7c17c6f69e7ff93546e24d1490757cf49a;hp=506e778ee062dd3e185e6cc46e4aa39aa41a9b44;hpb=581bf1c292d1521c3a805c39e6dcca54a9a86b6d;p=pf3gnuchains%2Fgcc-fork.git

diff --git a/gcc/tree-ssa-alias.c b/gcc/tree-ssa-alias.c
index 506e778ee06..1b3981564d1 100644
--- a/gcc/tree-ssa-alias.c
+++ b/gcc/tree-ssa-alias.c
@@ -219,13 +219,6 @@ ptr_deref_may_alias_decl_p (tree ptr, tree decl)
   if (!pi)
     return true;
 
-  /* If the decl can be used as a restrict tag and we have a restrict
-     pointer and that pointers points-to set doesn't contain this decl
-     then they can't alias.  */
-  if (DECL_RESTRICTED_P (decl)
-      && pi->pt.vars_contains_restrict)
-    return bitmap_bit_p (pi->pt.vars, DECL_PT_UID (decl));
-
   return pt_solution_includes (&pi->pt, decl);
 }
 
@@ -243,17 +236,6 @@ ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
   STRIP_NOPS (ptr1);
   STRIP_NOPS (ptr2);
 
-  /* Anything we do not explicilty handle aliases.  */
-  if ((TREE_CODE (ptr1) != SSA_NAME
-       && TREE_CODE (ptr1) != ADDR_EXPR
-       && TREE_CODE (ptr1) != POINTER_PLUS_EXPR)
-      || (TREE_CODE (ptr2) != SSA_NAME
-          && TREE_CODE (ptr2) != ADDR_EXPR
-          && TREE_CODE (ptr2) != POINTER_PLUS_EXPR)
-      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
-      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
-    return true;
-
   /* Disregard pointer offsetting.  */
   if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
     {
@@ -282,7 +264,7 @@ ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
       if (base
          && (TREE_CODE (base) == MEM_REF
              || TREE_CODE (base) == TARGET_MEM_REF))
-       ptr1 = TREE_OPERAND (base, 0);
+       return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
       else if (base
               && DECL_P (base))
        return ptr_deref_may_alias_decl_p (ptr2, base);
@@ -295,7 +277,7 @@ ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
       if (base
          && (TREE_CODE (base) == MEM_REF
              || TREE_CODE (base) == TARGET_MEM_REF))
-       ptr2 = TREE_OPERAND (base, 0);
+       return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
       else if (base
               && DECL_P (base))
        return ptr_deref_may_alias_decl_p (ptr1, base);
@@ -303,6 +285,13 @@ ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
        return true;
     }
 
+  /* From here we require SSA name pointers.  Anything else aliases.  */
+  if (TREE_CODE (ptr1) != SSA_NAME
+      || TREE_CODE (ptr2) != SSA_NAME
+      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
+      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
+    return true;
+
   /* We may end up with two empty points-to solutions for two same pointers.
      In this case we still want to say both pointers alias, so shortcut
      that here.  */
@@ -316,11 +305,6 @@ ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
   if (!pi1 || !pi2)
     return true;
 
-  /* If both pointers are restrict-qualified try to disambiguate
-     with restrict information.  */
-  if (!pt_solutions_same_restrict_base (&pi1->pt, &pi2->pt))
-    return false;
-
   /* ??? This does not use TBAA to prune decls from the intersection
      that not both pointers may access.  */
   return pt_solutions_intersect (&pi1->pt, &pi2->pt);
@@ -426,8 +410,6 @@ dump_points_to_solution (FILE *file, struct pt_solution *pt)
       dump_decl_set (file, pt->vars);
       if (pt->vars_contains_global)
        fprintf (file, " (includes global vars)");
-      if (pt->vars_contains_restrict)
-       fprintf (file, " (includes restrict tags)");
     }
 }
 
@@ -470,6 +452,7 @@ ao_ref_init (ao_ref *r, tree ref)
   r->max_size = -1;
   r->ref_alias_set = -1;
   r->base_alias_set = -1;
+  r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
 }
 
 /* Returns the base object of the memory reference *REF.  */
@@ -539,6 +522,7 @@ ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
   ref->max_size = ref->size = -1;
   ref->ref_alias_set = 0;
   ref->base_alias_set = 0;
+  ref->volatile_p = false;
 }
 
 /* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
@@ -820,8 +804,7 @@ indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
 
   /* If either reference is view-converted, give up now.  */
   if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
-      || same_type_for_tbaa (TREE_TYPE (dbase2),
-                             TREE_TYPE (reference_alias_ptr_type (dbase2))) != 1)
+      || same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (base2)) != 1)
     return true;
 
   /* If both references are through the same type, they do not alias
@@ -1035,6 +1018,11 @@ refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
       || TREE_CODE (base2) == LABEL_DECL)
     return true;
 
+  /* Two volatile accesses always conflict.  */
+  if (ref1->volatile_p
+      && ref2->volatile_p)
+    return true;
+
   /* Defer to simple offset based disambiguation if we have
      references based on two decls.  Do this before defering to
      TBAA to handle must-alias cases in conformance with the
@@ -1158,6 +1146,11 @@ ref_maybe_used_by_call_p_1 (gimple call, ao_ref *ref)
   if (!base)
     return true;
 
+  /* A call that is not without side-effects might involve volatile
+     accesses and thus conflicts with all other volatile accesses.  */
+  if (ref->volatile_p)
+    return true;
+
   /* If the reference is based on a decl that is not aliased the call
      cannot possibly use it.  */
   if (DECL_P (base)
@@ -1196,6 +1189,8 @@
       case BUILT_IN_MEMPCPY:
       case BUILT_IN_STPCPY:
       case BUILT_IN_STPNCPY:
+      case BUILT_IN_TM_MEMCPY:
+      case BUILT_IN_TM_MEMMOVE:
        {
          ao_ref dref;
          tree size = NULL_TREE;
@@ -1223,6 +1218,7 @@
       case BUILT_IN_MEMMOVE_CHK:
       case BUILT_IN_MEMPCPY_CHK:
       case BUILT_IN_STPCPY_CHK:
+      case BUILT_IN_STPNCPY_CHK:
        {
          ao_ref dref;
          tree size = NULL_TREE;
@@ -1242,6 +1238,32 @@
                        size);
          return refs_may_alias_p_1 (&dref, ref, false);
        }
+
+      /* The following functions read memory pointed to by their
+        first argument.  */
+      CASE_BUILT_IN_TM_LOAD (1):
+      CASE_BUILT_IN_TM_LOAD (2):
+      CASE_BUILT_IN_TM_LOAD (4):
+      CASE_BUILT_IN_TM_LOAD (8):
+      CASE_BUILT_IN_TM_LOAD (FLOAT):
+      CASE_BUILT_IN_TM_LOAD (DOUBLE):
+      CASE_BUILT_IN_TM_LOAD (LDOUBLE):
+      CASE_BUILT_IN_TM_LOAD (M64):
+      CASE_BUILT_IN_TM_LOAD (M128):
+      CASE_BUILT_IN_TM_LOAD (M256):
+      case BUILT_IN_TM_LOG:
+      case BUILT_IN_TM_LOG_1:
+      case BUILT_IN_TM_LOG_2:
+      case BUILT_IN_TM_LOG_4:
+      case BUILT_IN_TM_LOG_8:
+      case BUILT_IN_TM_LOG_FLOAT:
+      case BUILT_IN_TM_LOG_DOUBLE:
+      case BUILT_IN_TM_LOG_LDOUBLE:
+      case BUILT_IN_TM_LOG_M64:
+      case BUILT_IN_TM_LOG_M128:
+      case BUILT_IN_TM_LOG_M256:
+       return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);
+
       /* These read memory pointed to by the first argument.  */
       case BUILT_IN_STRDUP:
       case BUILT_IN_STRNDUP:
@@ -1264,6 +1286,7 @@
       case BUILT_IN_STACK_SAVE:
       case BUILT_IN_STACK_RESTORE:
       case BUILT_IN_MEMSET:
+      case BUILT_IN_TM_MEMSET:
       case BUILT_IN_MEMSET_CHK:
       case BUILT_IN_FREXP:
       case BUILT_IN_FREXPF:
@@ -1462,6 +1485,11 @@ call_may_clobber_ref_p_1 (gimple call, ao_ref *ref)
       || CONSTANT_CLASS_P (base))
     return false;
 
+  /* A call that is not without side-effects might involve volatile
+     accesses and thus conflicts with all other volatile accesses.  */
+  if (ref->volatile_p)
+    return true;
+
   /* If the reference is based on a decl that is not aliased the call
      cannot possibly clobber it.  */
   if (DECL_P (base)
@@ -1494,6 +1522,19 @@
       case BUILT_IN_STRCAT:
       case BUILT_IN_STRNCAT:
       case BUILT_IN_MEMSET:
+      case BUILT_IN_TM_MEMSET:
+      CASE_BUILT_IN_TM_STORE (1):
+      CASE_BUILT_IN_TM_STORE (2):
+      CASE_BUILT_IN_TM_STORE (4):
+      CASE_BUILT_IN_TM_STORE (8):
+      CASE_BUILT_IN_TM_STORE (FLOAT):
+      CASE_BUILT_IN_TM_STORE (DOUBLE):
+      CASE_BUILT_IN_TM_STORE (LDOUBLE):
+      CASE_BUILT_IN_TM_STORE (M64):
+      CASE_BUILT_IN_TM_STORE (M128):
+      CASE_BUILT_IN_TM_STORE (M256):
+      case BUILT_IN_TM_MEMCPY:
+      case BUILT_IN_TM_MEMMOVE:
        {
          ao_ref dref;
          tree size = NULL_TREE;
@@ -1515,6 +1556,7 @@
       case BUILT_IN_MEMMOVE_CHK:
       case BUILT_IN_MEMPCPY_CHK:
       case BUILT_IN_STPCPY_CHK:
+      case BUILT_IN_STPNCPY_CHK:
       case BUILT_IN_STRCAT_CHK:
       case BUILT_IN_STRNCAT_CHK:
       case BUILT_IN_MEMSET_CHK:
@@ -1844,8 +1886,10 @@ stmt_kills_ref_p (gimple stmt, tree ref)
 
 static bool
 maybe_skip_until (gimple phi, tree target, ao_ref *ref,
-                 tree vuse, bitmap *visited)
+                 tree vuse, bitmap *visited, bool abort_on_visited)
 {
+  basic_block bb = gimple_bb (phi);
+
   if (!*visited)
     *visited = BITMAP_ALLOC (NULL);
 
@@ -1860,8 +1904,9 @@ maybe_skip_until (gimple phi, tree target, ao_ref *ref,
        {
          /* An already visited PHI node ends the walk successfully.  */
          if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
-           return true;
-         vuse = get_continuation_for_phi (def_stmt, ref, visited);
+           return !abort_on_visited;
+         vuse = get_continuation_for_phi (def_stmt, ref,
+                                          visited, abort_on_visited);
          if (!vuse)
            return false;
          continue;
@@ -1870,11 +1915,74 @@ maybe_skip_until (gimple phi, tree target, ao_ref *ref,
        }
       else if (gimple_nop_p (def_stmt)
               || stmt_may_clobber_ref_p_1 (def_stmt, ref))
        return false;
+      /* If we reach a new basic-block see if we already skipped it
+         in a previous walk that ended successfully.  */
+      if (gimple_bb (def_stmt) != bb)
+       {
+         if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
+           return !abort_on_visited;
+         bb = gimple_bb (def_stmt);
+       }
       vuse = gimple_vuse (def_stmt);
     }
   return true;
 }
 
+/* For two PHI arguments ARG0 and ARG1 try to skip non-aliasing code
+   until we hit the phi argument definition that dominates the other one.
+   Return that, or NULL_TREE if there is no such definition.  */
+
+static tree
+get_continuation_for_phi_1 (gimple phi, tree arg0, tree arg1,
+                           ao_ref *ref, bitmap *visited,
+                           bool abort_on_visited)
+{
+  gimple def0 = SSA_NAME_DEF_STMT (arg0);
+  gimple def1 = SSA_NAME_DEF_STMT (arg1);
+  tree common_vuse;
+
+  if (arg0 == arg1)
+    return arg0;
+  else if (gimple_nop_p (def0)
+          || (!gimple_nop_p (def1)
+              && dominated_by_p (CDI_DOMINATORS,
+                                 gimple_bb (def1), gimple_bb (def0))))
+    {
+      if (maybe_skip_until (phi, arg0, ref, arg1, visited, abort_on_visited))
+       return arg0;
+    }
+  else if (gimple_nop_p (def1)
+          || dominated_by_p (CDI_DOMINATORS,
+                             gimple_bb (def0), gimple_bb (def1)))
+    {
+      if (maybe_skip_until (phi, arg1, ref, arg0, visited, abort_on_visited))
+       return arg1;
+    }
+  /* Special case of a diamond:
+       MEM_1 = ...
+       goto (cond) ? L1 : L2
+       L1: store1 = ...    #MEM_2 = vuse(MEM_1)
+           goto L3
+       L2: store2 = ...    #MEM_3 = vuse(MEM_1)
+       L3: MEM_4 = PHI<MEM_2, MEM_3>
+     We were called with the PHI at L3, MEM_2 and MEM_3 don't
+     dominate each other, but still we can easily skip this PHI node
+     if we recognize that the vuse MEM operand is the same for both,
+     and that we can skip both statements (they don't clobber us).
+     This is still linear.  Don't use maybe_skip_until, that might
+     potentially be slow.  */
+  else if ((common_vuse = gimple_vuse (def0))
+          && common_vuse == gimple_vuse (def1))
+    {
+      if (!stmt_may_clobber_ref_p_1 (def0, ref)
+         && !stmt_may_clobber_ref_p_1 (def1, ref))
+       return common_vuse;
+    }
+
+  return NULL_TREE;
+}
+
+
 /* Starting from a PHI node for the virtual operand of the memory reference
    REF find a continuation virtual operand that allows to continue walking
    statements dominating PHI skipping only statements that cannot possibly
@@ -1882,7 +1990,8 @@ maybe_skip_until (gimple phi, tree target, ao_ref *ref,
    be found.  */
 
 tree
-get_continuation_for_phi (gimple phi, ao_ref *ref, bitmap *visited)
+get_continuation_for_phi (gimple phi, ao_ref *ref, bitmap *visited,
+                         bool abort_on_visited)
 {
   unsigned nargs = gimple_phi_num_args (phi);
 
@@ -1890,53 +1999,42 @@ get_continuation_for_phi (gimple phi, ao_ref *ref, bitmap *visited)
   if (nargs == 1)
     return PHI_ARG_DEF (phi, 0);
 
-  /* For two arguments try to skip non-aliasing code until we hit
-     the phi argument definition that dominates the other one.  */
-  if (nargs == 2)
+  /* For two or more arguments try to pairwise skip non-aliasing code
+     until we hit the phi argument definition that dominates the other one.  */
+  else if (nargs >= 2)
    {
-      tree arg0 = PHI_ARG_DEF (phi, 0);
-      tree arg1 = PHI_ARG_DEF (phi, 1);
-      gimple def0 = SSA_NAME_DEF_STMT (arg0);
-      gimple def1 = SSA_NAME_DEF_STMT (arg1);
-      tree common_vuse;
+      tree arg0, arg1;
+      unsigned i;
+
+      /* Find a candidate for the virtual operand which definition
+        dominates those of all others.  */
+      arg0 = PHI_ARG_DEF (phi, 0);
+      if (!SSA_NAME_IS_DEFAULT_DEF (arg0))
+       for (i = 1; i < nargs; ++i)
+         {
+           arg1 = PHI_ARG_DEF (phi, i);
+           if (SSA_NAME_IS_DEFAULT_DEF (arg1))
+             {
+               arg0 = arg1;
+               break;
+             }
+           if (dominated_by_p (CDI_DOMINATORS,
+                               gimple_bb (SSA_NAME_DEF_STMT (arg0)),
+                               gimple_bb (SSA_NAME_DEF_STMT (arg1))))
+             arg0 = arg1;
+         }
 
-      if (arg0 == arg1)
-       return arg0;
-      else if (gimple_nop_p (def0)
-              || (!gimple_nop_p (def1)
-                  && dominated_by_p (CDI_DOMINATORS,
-                                     gimple_bb (def1), gimple_bb (def0))))
+      /* Then pairwise reduce against the found candidate.  */
+      for (i = 0; i < nargs; ++i)
        {
-         if (maybe_skip_until (phi, arg0, ref, arg1, visited))
-           return arg0;
-       }
-      else if (gimple_nop_p (def1)
-              || dominated_by_p (CDI_DOMINATORS,
-                                 gimple_bb (def0), gimple_bb (def1)))
-       {
-         if (maybe_skip_until (phi, arg1, ref, arg0, visited))
-           return arg1;
-       }
-      /* Special case of a diamond:
-          MEM_1 = ...
-          goto (cond) ? L1 : L2
-          L1: store1 = ...    #MEM_2 = vuse(MEM_1)
-              goto L3
-          L2: store2 = ...    #MEM_3 = vuse(MEM_1)
-          L3: MEM_4 = PHI<MEM_2, MEM_3>
-        We were called with the PHI at L3, MEM_2 and MEM_3 don't
-        dominate each other, but still we can easily skip this PHI node
-        if we recognize that the vuse MEM operand is the same for both,
-        and that we can skip both statements (they don't clobber us).
-        This is still linear.  Don't use maybe_skip_until, that might
-        potentially be slow.  */
-      else if ((common_vuse = gimple_vuse (def0))
-              && common_vuse == gimple_vuse (def1))
-       {
-         if (!stmt_may_clobber_ref_p_1 (def0, ref)
-             && !stmt_may_clobber_ref_p_1 (def1, ref))
-           return common_vuse;
+         arg1 = PHI_ARG_DEF (phi, i);
+         arg0 = get_continuation_for_phi_1 (phi, arg0, arg1, ref, visited,
+                                            abort_on_visited);
+         if (!arg0)
+           return NULL_TREE;
        }
+
+      return arg0;
     }
 
   return NULL_TREE;
@@ -1967,6 +2065,7 @@ walk_non_aliased_vuses (ao_ref *ref, tree vuse,
 {
   bitmap visited = NULL;
   void *res;
+  bool translated = false;
 
   timevar_push (TV_ALIAS_STMT_WALK);
 
@@ -1983,7 +2082,7 @@ walk_non_aliased_vuses (ao_ref *ref, tree vuse,
       if (gimple_nop_p (def_stmt))
        break;
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
-       vuse = get_continuation_for_phi (def_stmt, ref, &visited);
+       vuse = get_continuation_for_phi (def_stmt, ref, &visited, translated);
      else
        {
          if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
@@ -2001,6 +2100,7 @@
          else if (res != NULL)
            break;
          /* Translation succeeded, continue walking.  */
+         translated = true;
        }
      vuse = gimple_vuse (def_stmt);
    }
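The volatile_p flag threaded through ao_ref in this patch has a simple user-visible reading: once both references are marked volatile, refs_may_alias_p_1 reports a conflict before TBAA gets a chance to disambiguate, and a call with side effects conservatively conflicts with any volatile reference. A minimal C sketch of the behavior this preserves follows; the function and variable names are illustrative, not from the patch.

    /* Even though int and float accesses would be disambiguated by TBAA,
       the volatile accesses below must be treated as conflicting, so the
       two loads of vi may not be merged or moved across the store to vf.  */
    volatile int vi;
    volatile float vf;

    int
    use_volatiles (void)
    {
      int a = vi;   /* volatile load */
      vf = 1.0f;    /* volatile store */
      int b = vi;   /* volatile load, kept distinct from the first */
      return a + b;
    }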
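The diamond special case documented in get_continuation_for_phi_1 corresponds to source of roughly the following shape. This is a hypothetical example; the MEM_* names follow the numbering in the patch comment, and p is assumed not to alias x or y.

    int x, y;

    int
    diamond (int cond, int *p)
    {
      int before = *p;   /* load performed under virtual operand MEM_1 */
      if (cond)
        x = 1;           /* store: #MEM_2, with vuse(MEM_1) */
      else
        y = 2;           /* store: #MEM_3, with vuse(MEM_1) */
      /* join point: MEM_4 = PHI<MEM_2, MEM_3> */
      return before + *p;   /* the walk from MEM_4 can continue at MEM_1,
                               since neither store may clobber *p */
    }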
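Similarly, the rewrite of get_continuation_for_phi lifts the old nargs == 2 restriction: it first searches the PHI arguments for a candidate whose definition dominates the others, then reduces the remaining arguments pairwise against that candidate with get_continuation_for_phi_1. A hypothetical source shape that produces such a multi-argument virtual PHI, which the old code simply gave up on:

    int a, b, c;

    int
    three_way (int sel, int *p)
    {
      int before = *p;   /* load under the incoming virtual operand */
      switch (sel)
        {
        case 0:  a = 1; break;
        case 1:  b = 2; break;
        default: c = 3; break;
        }
      /* join point: a virtual PHI with three arguments; the walk can
         still skip all three stores when none of them clobbers *p */
      return before + *p;
    }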