variable, and that same variable occurs in the same operands cache, then
the new cache vector will also get the same SSA_NAME.
- i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
- vector for VUSE, then the new vector will also be modified such that
- it contains 'a_5' rather than 'a'. */
+ i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
+ operand vector for VUSE, then the new vector will also be modified
+ such that it contains 'a_5' rather than 'a'. */
/* Structure storing statistics on how many call clobbers we have, and
/* Comparison function for qsort used in operand_build_sort_virtual. */
-static int
+int
operand_build_cmp (const void *p, const void *q)
{
const_tree const e1 = *((const_tree const *)p);
gimple_ssa_operands (cfun)->ssa_operand_mem_size
= OP_SIZE_3 * sizeof (struct voptype_d);
- /* Fail if there is not enough space. If there are this many operands
- required, first make sure there isn't a different problem causing this
- many operands. If the decision is that this is OK, then we can
- specially allocate a buffer just for this request. */
- gcc_assert (size <= gimple_ssa_operands (cfun)->ssa_operand_mem_size);
+ /* We can reliably trigger the case that we need arbitrarily many
+ operands (see PR34093), so allocate a buffer just for this request. */
+ if (size > gimple_ssa_operands (cfun)->ssa_operand_mem_size)
+ gimple_ssa_operands (cfun)->ssa_operand_mem_size = size;
ptr = (struct ssa_operand_memory_d *)
ggc_alloc (sizeof (struct ssa_operand_memory_d)
}
-/* Return a virtual op pointer with NUM elements which are all initialized to OP
- and are linked into the immediate uses for STMT. The new vop is appended
- after PREV. */
+/* Return a virtual op pointer with NUM elements which are all
+ initialized to OP and are linked into the immediate uses for STMT.
+ The new vop is appended after PREV. */
static inline voptype_p
add_vop (tree stmt, tree op, int num, voptype_p prev)
if (ref && TREE_CODE (ref) == TARGET_MEM_REF)
return true;
- /* If ALIAS is an SFT, it can't be touched if the offset
- and size of the access is not overlapping with the SFT offset and
- size. This is only true if we are accessing through a pointer
- to a type that is the same as SFT_PARENT_VAR. Otherwise, we may
- be accessing through a pointer to some substruct of the
- structure, and if we try to prune there, we will have the wrong
- offset, and get the wrong answer.
- i.e., we can't prune without more work if we have something like
-
- struct gcc_target
- {
- struct asm_out
- {
- const char *byte_op;
- struct asm_int_op
- {
- const char *hi;
- } aligned_op;
- } asm_out;
- } targetm;
-
- foo = &targetm.asm_out.aligned_op;
- return foo->hi;
-
- SFT.1, which represents hi, will have SFT_OFFSET=32 because in
- terms of SFT_PARENT_VAR, that is where it is.
- However, the access through the foo pointer will be at offset 0. */
- if (size != -1
- && TREE_CODE (alias) == STRUCT_FIELD_TAG
- && base
- && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias))
- && !overlap_subvar (offset, size, alias, NULL))
- {
-#ifdef ACCESS_DEBUGGING
- fprintf (stderr, "Access to ");
- print_generic_expr (stderr, ref, 0);
- fprintf (stderr, " may not touch ");
- print_generic_expr (stderr, alias, 0);
- fprintf (stderr, " in function %s\n", get_name (current_function_decl));
-#endif
- return false;
- }
-
/* Without strict aliasing, it is impossible for a component access
through a pointer to touch a random variable, unless that
variable *is* a structure or a pointer.
my_char_ref_1 = (char[1:1] &) &my_char;
D.874_2 = (*my_char_ref_1)[1]{lb: 1 sz: 1};
*/
- else if (ref
+ if (ref
&& flag_strict_aliasing
&& TREE_CODE (ref) != INDIRECT_REF
&& !MTAG_P (alias)
return true;
}
-/* Add the actual variables FULL_REF can access, given a member of
- full_ref's points-to set VAR, where FULL_REF is an access of SIZE at
- OFFSET from var. IS_CALL_SITE is true if this is a call, and IS_DEF
- is true if this is supposed to be a vdef, and false if this should
- be a VUSE.
-
- The real purpose of this function is to take a points-to set for a
- pointer to a structure, say
-
- struct s {
- int a;
- int b;
- } foo, *foop = &foo;
-
- and discover which variables an access, such as foop->b, can alias.
-
- This is necessary because foop only actually points to foo's first
- member, so that is all the points-to set contains. However, an access
- to foop->a may be touching some single SFT if we have created some
- SFT's for a structure. */
-
-static bool
-add_vars_for_offset (tree var,
- unsigned HOST_WIDE_INT offset, unsigned HOST_WIDE_INT size,
- bool is_def)
-{
- bool added = false;
- tree subvar;
- subvar_t sv;
- unsigned int i;
-
- /* Adjust offset by the pointed-to location. */
- offset += SFT_OFFSET (var);
-
- /* Add all subvars of var that overlap with the access.
- Binary search for the first relevant SFT. */
- sv = get_subvars_for_var (SFT_PARENT_VAR (var));
- if (!get_first_overlapping_subvar (sv, offset, size, &i))
- return false;
-
- for (; VEC_iterate (tree, sv, i, subvar); ++i)
- {
- if (SFT_OFFSET (subvar) > offset
- && size <= SFT_OFFSET (subvar) - offset)
- break;
-
- if (is_def)
- append_vdef (subvar);
- else
- append_vuse (subvar);
- added = true;
- }
-
- return added;
-}
-
/* Add VAR to the virtual operands array. FLAGS is as in
get_expr_operands. FULL_REF is a tree that contains the entire
pointer dereference expression, if available, or NULL otherwise.
{
tree al = referenced_var (i);
- /* For SFTs we have to consider all subvariables of the parent var
- if it is a potential points-to location. */
- if (TREE_CODE (al) == STRUCT_FIELD_TAG
- && TREE_CODE (var) == NAME_MEMORY_TAG)
- none_added &= !add_vars_for_offset (al, offset, size,
- flags & opf_def);
+ /* Call-clobbered tags may have non-call-clobbered
+ symbols in their alias sets. Ignore them if we are
+ adding VOPs for a call site. */
+ if (is_call_site && !is_call_clobbered (al))
+ continue;
+
+ /* If we do not know the full reference tree or if the access is
+ unspecified [0, -1], we cannot prune it. Otherwise try doing
+ so using access_can_touch_variable. */
+ if (full_ref
+ && !access_can_touch_variable (full_ref, al, offset, size))
+ continue;
+
+ if (flags & opf_def)
+ append_vdef (al);
else
- {
- /* Call-clobbered tags may have non-call-clobbered
- symbols in their alias sets. Ignore them if we are
- adding VOPs for a call site. */
- if (is_call_site && !is_call_clobbered (al))
- continue;
-
- /* If we do not know the full reference tree or if the access is
- unspecified [0, -1], we cannot prune it. Otherwise try doing
- so using access_can_touch_variable. */
- if (full_ref
- && !(offset == 0 && size == -1)
- && !access_can_touch_variable (full_ref, al, offset, size))
- continue;
-
- if (flags & opf_def)
- append_vdef (al);
- else
- append_vuse (al);
- none_added = false;
- }
+ append_vuse (al);
+ none_added = false;
}
if (flags & opf_def)
is the same as in get_indirect_ref_operands. */
static void
-get_addr_dereference_operands (tree stmt, tree *addr, int flags,
- tree full_ref,
- HOST_WIDE_INT offset, HOST_WIDE_INT size,
- bool recurse_on_base)
- {
- tree ptr = *addr;
+get_addr_dereference_operands (tree stmt, tree *addr, int flags, tree full_ref,
+ HOST_WIDE_INT offset, HOST_WIDE_INT size,
+ bool recurse_on_base)
+{
+ tree ptr = *addr;
stmt_ann_t s_ann = stmt_ann (stmt);
s_ann->references_memory = true;
/* If we are emitting debugging dumps, display a warning if
PTR is an SSA_NAME with no flow-sensitive alias
information. That means that we may need to compute
- aliasing again. */
+ aliasing again or that a propagation pass forgot to
+ update the alias information on the pointers. */
if (dump_file
&& TREE_CODE (ptr) == SSA_NAME
- && pi == NULL)
+ && (pi == NULL
+ || (pi->name_mem_tag == NULL_TREE
+ && !pi->pt_anything)))
{
fprintf (dump_file,
"NOTE: no flow-sensitive alias info for ");
print_generic_expr (dump_file, ptr, dump_flags);
fprintf (dump_file, " in ");
- print_generic_stmt (dump_file, stmt, dump_flags);
+ print_generic_stmt (dump_file, stmt, 0);
}
if (TREE_CODE (ptr) == SSA_NAME)
to make sure to not prune virtual operands based on offset
and size. */
if (v_ann->symbol_mem_tag)
- add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
- full_ref, 0, -1, false);
+ {
+ add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
+ full_ref, 0, -1, false);
+ /* Make sure we add the SMT itself. */
+ if (!(flags & opf_no_vops))
+ {
+ if (flags & opf_def)
+ append_vdef (v_ann->symbol_mem_tag);
+ else
+ append_vuse (v_ann->symbol_mem_tag);
+ }
+ }
/* Aliasing information is missing; mark statement as
volatile so we won't optimize it out too actively. */
get_expr_operands (stmt, addr, opf_use);
}
+
/* A subroutine of get_expr_operands to handle INDIRECT_REF,
ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.
something else will do it for us. */
static void
-get_indirect_ref_operands (tree stmt, tree expr, int flags,
- tree full_ref,
- HOST_WIDE_INT offset, HOST_WIDE_INT size,
- bool recurse_on_base)
+get_indirect_ref_operands (tree stmt, tree expr, int flags, tree full_ref,
+ HOST_WIDE_INT offset, HOST_WIDE_INT size,
+ bool recurse_on_base)
{
tree *pptr = &TREE_OPERAND (expr, 0);
stmt_ann_t s_ann = stmt_ann (stmt);
if (TREE_THIS_VOLATILE (expr))
s_ann->has_volatile_ops = true;
- get_addr_dereference_operands (stmt, pptr, flags, full_ref,
- offset, size, recurse_on_base);
+ get_addr_dereference_operands (stmt, pptr, flags, full_ref, offset, size,
+ recurse_on_base);
}
+
/* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
static void
tree real_var = var;
bool not_read;
bool not_written;
-
- /* Not read and not written are computed on regular vars, not
- subvars, so look at the parent var if this is an SFT. */
- if (TREE_CODE (var) == STRUCT_FIELD_TAG)
- real_var = SFT_PARENT_VAR (var);
not_read = not_read_b
? bitmap_bit_p (not_read_b, DECL_UID (real_var))
clobber_stats.readonly_clobbers++;
- /* Not read and not written are computed on regular vars, not
- subvars, so look at the parent var if this is an SFT. */
-
- if (TREE_CODE (var) == STRUCT_FIELD_TAG)
- real_var = SFT_PARENT_VAR (var);
-
not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
: false;
EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, i, bi)
{
tree var = referenced_var (i);
-
- /* Subvars are explicitly represented in this list, so we
- don't need the original to be added to the clobber ops,
- but the original *will* be in this list because we keep
- the addressability of the original variable up-to-date
- to avoid confusing the back-end. */
- if (var_can_have_subvars (var)
- && get_subvars_for_var (var) != NULL)
- continue;
-
add_stmt_operand (&var, s_ann, opf_def | opf_implicit);
}
break;
a preserving definition (VDEF).
Preserving definitions are those that modify a part of an
- aggregate object for which no subvars have been computed (or the
- reference does not correspond exactly to one of them). Stores
- through a pointer are also represented with VDEF operators.
+ aggregate object. Stores through a pointer are also represented
+ with VDEF operators.
We used to distinguish between preserving and killing definitions.
We always emit preserving definitions now. */
return;
case SSA_NAME:
- case STRUCT_FIELD_TAG:
case SYMBOL_MEMORY_TAG:
case NAME_MEMORY_TAG:
add_stmt_operand (expr_p, s_ann, flags);
case VAR_DECL:
case PARM_DECL:
case RESULT_DECL:
- {
- subvar_t svars;
-
- /* Add the subvars for a variable, if it has subvars, to DEFS
- or USES. Otherwise, add the variable itself. Whether it
- goes to USES or DEFS depends on the operand flags. */
- if (var_can_have_subvars (expr)
- && (svars = get_subvars_for_var (expr)))
- {
- unsigned int i;
- tree subvar;
- for (i = 0; VEC_iterate (tree, svars, i, subvar); ++i)
- add_stmt_operand (&subvar, s_ann, flags);
- }
- else
- add_stmt_operand (expr_p, s_ann, flags);
-
- return;
- }
+ add_stmt_operand (expr_p, s_ann, flags);
+ return;
case MISALIGNED_INDIRECT_REF:
get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
{
tree ref;
HOST_WIDE_INT offset, size, maxsize;
- bool none = true;
if (TREE_THIS_VOLATILE (expr))
s_ann->has_volatile_ops = true;
- /* This component reference becomes an access to all of the
- subvariables it can touch, if we can determine that, but
- *NOT* the real one. If we can't determine which fields we
- could touch, the recursion will eventually get to a
- variable and add *all* of its subvars, or whatever is the
- minimum correct subset. */
ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
- if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
- {
- subvar_t svars = get_subvars_for_var (ref);
- unsigned int i;
- tree subvar;
-
- for (i = 0; VEC_iterate (tree, svars, i, subvar); ++i)
- {
- bool exact;
-
- if (overlap_subvar (offset, maxsize, subvar, &exact))
- {
- int subvar_flags = flags;
- none = false;
- add_stmt_operand (&subvar, s_ann, subvar_flags);
- }
- }
-
- if (!none)
- flags |= opf_no_vops;
-
- if ((DECL_P (ref) && TREE_THIS_VOLATILE (ref))
- || (TREE_CODE (ref) == SSA_NAME
- && TREE_THIS_VOLATILE (SSA_NAME_VAR (ref))))
- s_ann->has_volatile_ops = true;
- }
- else if (TREE_CODE (ref) == INDIRECT_REF)
+ if (TREE_CODE (ref) == INDIRECT_REF)
{
get_indirect_ref_operands (stmt, ref, flags, expr, offset,
maxsize, false);
flags |= opf_no_vops;
}
- /* Even if we found subvars above we need to ensure to see
- immediate uses for d in s.a[d]. In case of s.a having
- a subvar or we would miss it otherwise. */
get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
if (code == COMPONENT_REF)
case OMP_FOR:
{
- tree init = OMP_FOR_INIT (expr);
- tree cond = OMP_FOR_COND (expr);
- tree incr = OMP_FOR_INCR (expr);
tree c, clauses = OMP_FOR_CLAUSES (stmt);
+ int i;
- get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (init, 0), opf_def);
- get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (init, 1), opf_use);
- get_expr_operands (stmt, &TREE_OPERAND (cond, 1), opf_use);
- get_expr_operands (stmt, &TREE_OPERAND (GIMPLE_STMT_OPERAND (incr, 1), 1),
- opf_use);
+ for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (expr)); i++)
+ {
+ tree init = TREE_VEC_ELT (OMP_FOR_INIT (expr), i);
+ tree cond = TREE_VEC_ELT (OMP_FOR_COND (expr), i);
+ tree incr = TREE_VEC_ELT (OMP_FOR_INCR (expr), i);
+
+ get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (init, 0), opf_def);
+ get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (init, 1), opf_use);
+ get_expr_operands (stmt, &TREE_OPERAND (cond, 1), opf_use);
+ get_expr_operands (stmt,
+ &TREE_OPERAND (GIMPLE_STMT_OPERAND (incr, 1),
+ 1), opf_use);
+ }
c = find_omp_clause (clauses, OMP_CLAUSE_SCHEDULE);
if (c)
case OMP_ATOMIC_LOAD:
{
- tree *addr = &TREE_OPERAND (expr, 1);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_def);
-
- if (TREE_CODE (*addr) == ADDR_EXPR)
- get_expr_operands (stmt, &TREE_OPERAND (*addr, 0), opf_def);
- else
- get_addr_dereference_operands (stmt, addr, opf_def,
- NULL_TREE, 0, -1, true);
- return;
+ tree *addr = &TREE_OPERAND (expr, 1);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_def);
+
+ if (TREE_CODE (*addr) == ADDR_EXPR)
+ get_expr_operands (stmt, &TREE_OPERAND (*addr, 0), opf_def);
+ else
+ get_addr_dereference_operands (stmt, addr, opf_def,
+ NULL_TREE, 0, -1, true);
+ return;
}
case OMP_ATOMIC_STORE:
{
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use);
- return;
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use);
+ return;
}
case BLOCK:
case OMP_RETURN:
case OMP_SECTION:
case OMP_SECTIONS_SWITCH:
+ case PREDICT_EXPR:
/* Expressions that make no memory references. */
return;
if (ann->addresses_taken && bitmap_empty_p (ann->addresses_taken))
ann->addresses_taken = NULL;
+
/* For added safety, assume that statements with volatile operands
also reference memory. */
if (ann->has_volatile_ops)
ann->references_memory = true;
}
+
/* Releases the operands of STMT back to their freelists, and clears
the stmt operand lists. */
}
}
+
/* Free any operands vectors in OPS. */
void
create an artificial stmt which looks like a load from the store, this can
be used to eliminate redundant loads. OLD_OPS are the operands from the
store stmt, and NEW_STMT is the new load which represents a load of the
- values stored. */
+ values stored. If DELINK_IMM_USES_P is true, the immediate
+ uses of this stmt will be de-linked. */
void
-create_ssa_artificial_load_stmt (tree new_stmt, tree old_stmt)
+create_ssa_artificial_load_stmt (tree new_stmt, tree old_stmt,
+ bool delink_imm_uses_p)
{
tree op;
ssa_op_iter iter;
use_operand_p use_p;
unsigned i;
+ stmt_ann_t ann;
- get_stmt_ann (new_stmt);
+ /* Create the stmt annotation but make sure to not mark the stmt
+ as modified as we will build operands ourselves. */
+ ann = get_stmt_ann (new_stmt);
+ ann->modified = 0;
/* Process NEW_STMT looking for operands. */
start_ssa_stmt_operands ();
if (TREE_CODE (op) != SSA_NAME)
var_ann (op)->in_vuse_list = false;
- for (i = 0; VEC_iterate (tree, build_vuses, i, op); i++)
+ for (i = 0; VEC_iterate (tree, build_vdefs, i, op); i++)
if (TREE_CODE (op) != SSA_NAME)
var_ann (op)->in_vdef_list = false;
VEC_truncate (tree, build_vdefs, 0);
VEC_truncate (tree, build_vuses, 0);
+ /* Clear the loads and stores bitmaps. */
+ bitmap_clear (build_loads);
+ bitmap_clear (build_stores);
+
/* For each VDEF on the original statement, we want to create a
VUSE of the VDEF result operand on the new statement. */
FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter, SSA_OP_VDEF)
finalize_ssa_stmt_operands (new_stmt);
/* All uses in this fake stmt must not be in the immediate use lists. */
- FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
- delink_imm_use (use_p);
+ if (delink_imm_uses_p)
+ FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
+ delink_imm_use (use_p);
}
/* Add the base address of REF to the set *ADDRESSES_TAKEN. If
*ADDRESSES_TAKEN is NULL, a new set is created. REF may be
a single variable whose address has been taken or any other valid
- GIMPLE memory reference (structure reference, array, etc). If the
- base address of REF is a decl that has sub-variables, also add all
- of its sub-variables. */
+ GIMPLE memory reference (structure reference, array, etc). */
void
add_to_addressable_set (tree ref, bitmap *addresses_taken)
{
tree var;
- subvar_t svars;
gcc_assert (addresses_taken);
{
if (*addresses_taken == NULL)
*addresses_taken = BITMAP_GGC_ALLOC ();
-
- if (var_can_have_subvars (var)
- && (svars = get_subvars_for_var (var)))
- {
- unsigned int i;
- tree subvar;
- for (i = 0; VEC_iterate (tree, svars, i, subvar); ++i)
- {
- bitmap_set_bit (*addresses_taken, DECL_UID (subvar));
- TREE_ADDRESSABLE (subvar) = 1;
- }
- }
- else
- {
- bitmap_set_bit (*addresses_taken, DECL_UID (var));
- TREE_ADDRESSABLE (var) = 1;
- }
+ bitmap_set_bit (*addresses_taken, DECL_UID (var));
+ TREE_ADDRESSABLE (var) = 1;
}
}
else if (!bitmap_equal_p (s1, s2))
{
bitmap t1 = BITMAP_ALLOC (NULL);
- bitmap t2 = BITMAP_ALLOC (NULL);
-
- bitmap_and_compl (t1, s1, s2);
- bitmap_and_compl (t2, s2, s1);
- bitmap_ior_into (t1, t2);
+ bitmap_xor (t1, s1, s2);
mark_set_for_renaming (t1);
-
BITMAP_FREE (t1);
- BITMAP_FREE (t2);
}
}