+ /* Mark statements with volatile operands. Optimizers should back
+ off from statements having volatile operands. */
+ if (TREE_THIS_VOLATILE (sym) && s_ann)
+ s_ann->has_volatile_ops = true;
+
+ /* If the variable cannot be modified and this is a VDEF change
+ it into a VUSE. This happens when read-only variables are marked
+      call-clobbered and/or aliased to writable variables.  So we make
+      sure this only happens for non-specific stores.
+
+ Note that if this is a specific store, i.e. associated with a
+ GIMPLE_MODIFY_STMT, then we can't suppress the VDEF, lest we run
+ into validation problems.
+
+ This can happen when programs cast away const, leaving us with a
+ store to read-only memory. If the statement is actually executed
+ at runtime, then the program is ill formed. If the statement is
+ not executed then all is well. At the very least, we cannot ICE. */
+ if ((flags & opf_implicit) && unmodifiable_var_p (var))
+ flags &= ~opf_def;
+
+ /* The variable is not a GIMPLE register. Add it (or its aliases) to
+ virtual operands, unless the caller has specifically requested
+ not to add virtual operands (used when adding operands inside an
+ ADDR_EXPR expression). */
+ if (flags & opf_no_vops)
+ return;
+
+ if (MTAG_P (var))
+ aliases = MTAG_ALIASES (var);
+
+ if (aliases == NULL)
+ {
+ if (s_ann && !gimple_aliases_computed_p (cfun))
+ s_ann->has_volatile_ops = true;
+
+ /* The variable is not aliased or it is an alias tag. */
+ if (flags & opf_def)
+ append_vdef (var);
+ else
+ append_vuse (var);
+ }
+ else
+ {
+ bitmap_iterator bi;
+ unsigned int i;
+ tree al;
+
+ /* The variable is aliased. Add its aliases to the virtual
+ operands. */
+ gcc_assert (!bitmap_empty_p (aliases));
+
+ if (flags & opf_def)
+ {
+ bool none_added = true;
+ EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
+ {
+ al = referenced_var (i);
+ if (!access_can_touch_variable (full_ref, al, offset, size))
+ continue;
+
+ /* Call-clobbered tags may have non-call-clobbered
+ symbols in their alias sets. Ignore them if we are
+ adding VOPs for a call site. */
+ if (is_call_site && !is_call_clobbered (al))
+ continue;
+
+ none_added = false;
+ append_vdef (al);
+ }
+
+ /* If the variable is also an alias tag, add a virtual
+ operand for it, otherwise we will miss representing
+ references to the members of the variable's alias set.
+ This fixes the bug in gcc.c-torture/execute/20020503-1.c.
+
+ It is also necessary to add bare defs on clobbers for
+ SMT's, so that bare SMT uses caused by pruning all the
+ aliases will link up properly with calls. In order to
+ keep the number of these bare defs we add down to the
+ minimum necessary, we keep track of which SMT's were used
+ alone in statement vdefs or VUSEs. */
+ if (none_added
+ || (TREE_CODE (var) == SYMBOL_MEMORY_TAG
+ && is_call_site))
+ {
+ append_vdef (var);
+ }
+ }
+ else
+ {
+ bool none_added = true;
+ EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
+ {
+ al = referenced_var (i);
+ if (!access_can_touch_variable (full_ref, al, offset, size))
+ continue;
+
+ /* Call-clobbered tags may have non-call-clobbered
+ symbols in their alias sets. Ignore them if we are
+ adding VOPs for a call site. */
+ if (is_call_site && !is_call_clobbered (al))
+ continue;
+
+ none_added = false;
+ append_vuse (al);
+ }
+
+ /* Even if no aliases have been added, we still need to
+ establish def-use and use-def chains, lest
+ transformations think that this is not a memory
+ reference. For an example of this scenario, see
+ testsuite/g++.dg/opt/cleanup1.C. */
+ if (none_added)
+ append_vuse (var);
+ }
+ }
+}
+
+
+/* Add *VAR_P to the appropriate operand array for S_ANN. FLAGS is as in
+ get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
+ the statement's real operands, otherwise it is added to virtual
+ operands. */
+
+ static void
+ add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
+ {
+   tree var, sym;
+ 
+   gcc_assert (SSA_VAR_P (*var_p) && s_ann);
+ 
+   var = *var_p;
+   /* SSA names carry flags such as TREE_THIS_VOLATILE on their
+      underlying _DECL, so look through to the base symbol.  Note the
+      old unused local V_ANN (set from var_ann (sym) and never read)
+      has been removed.  */
+   sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
+ 
+   /* Mark statements with volatile operands.  */
+   if (TREE_THIS_VOLATILE (sym))
+     s_ann->has_volatile_ops = true;
+ 
+   if (is_gimple_reg (sym))
+     {
+       /* The variable is a GIMPLE register.  Add it to real operands.  */
+       if (flags & opf_def)
+ 	append_def (var_p);
+       else
+ 	append_use (var_p);
+     }
+   else
+     /* Memory variables get virtual operands (possibly expanded to
+        their alias sets by add_virtual_operand).  */
+     add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false);
+ }
+
+
+/* A subroutine of get_expr_operands to handle INDIRECT_REF,
+ ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.
+
+ STMT is the statement being processed, EXPR is the INDIRECT_REF
+ that got us here.
+
+ FLAGS is as in get_expr_operands.
+
+ FULL_REF contains the full pointer dereference expression, if we
+ have it, or NULL otherwise.
+
+ OFFSET and SIZE are the location of the access inside the
+ dereferenced pointer, if known.
+
+ RECURSE_ON_BASE should be set to true if we want to continue
+ calling get_expr_operands on the base pointer, and false if
+ something else will do it for us. */
+
+ static void
+ get_indirect_ref_operands (tree stmt, tree expr, int flags,
+ 			   tree full_ref,
+ 			   HOST_WIDE_INT offset, HOST_WIDE_INT size,
+ 			   bool recurse_on_base)
+ {
+   tree *pptr = &TREE_OPERAND (expr, 0);
+   tree ptr = *pptr;
+   stmt_ann_t s_ann = stmt_ann (stmt);
+ 
+   /* A pointer dereference always references memory.  Guard S_ANN
+      here: the rest of this function consistently checks S_ANN for
+      NULL before using it, but the old code dereferenced it
+      unconditionally for the references_memory store just before
+      testing it for NULL.  */
+   if (s_ann)
+     {
+       s_ann->references_memory = true;
+       if (TREE_THIS_VOLATILE (expr))
+ 	s_ann->has_volatile_ops = true;
+     }
+ 
+   if (SSA_VAR_P (ptr))
+     {
+       struct ptr_info_def *pi = NULL;
+ 
+       /* If PTR has flow-sensitive points-to information, use it.  */
+       if (TREE_CODE (ptr) == SSA_NAME
+ 	  && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
+ 	  && pi->name_mem_tag)
+ 	{
+ 	  /* PTR has its own memory tag.  Use it.  */
+ 	  add_virtual_operand (pi->name_mem_tag, s_ann, flags,
+ 			       full_ref, offset, size, false);
+ 	}
+       else
+ 	{
+ 	  /* If PTR is not an SSA_NAME or it doesn't have a name
+ 	     tag, use its symbol memory tag.  */
+ 	  var_ann_t v_ann;
+ 
+ 	  /* If we are emitting debugging dumps, display a warning if
+ 	     PTR is an SSA_NAME with no flow-sensitive alias
+ 	     information.  That means that we may need to compute
+ 	     aliasing again.  */
+ 	  if (dump_file
+ 	      && TREE_CODE (ptr) == SSA_NAME
+ 	      && pi == NULL)
+ 	    {
+ 	      fprintf (dump_file,
+ 		       "NOTE: no flow-sensitive alias info for ");
+ 	      print_generic_expr (dump_file, ptr, dump_flags);
+ 	      fprintf (dump_file, " in ");
+ 	      print_generic_stmt (dump_file, stmt, dump_flags);
+ 	    }
+ 
+ 	  if (TREE_CODE (ptr) == SSA_NAME)
+ 	    ptr = SSA_NAME_VAR (ptr);
+ 	  v_ann = var_ann (ptr);
+ 
+ 	  /* The dereference hits whatever the pointer's symbol memory
+ 	     tag may alias.  */
+ 	  if (v_ann->symbol_mem_tag)
+ 	    add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
+ 				 full_ref, offset, size, false);
+ 
+ 	  /* Aliasing information is missing; mark statement as
+ 	     volatile so we won't optimize it out too actively.  */
+ 	  else if (s_ann
+ 		   && !gimple_aliases_computed_p (cfun)
+ 		   && (flags & opf_def))
+ 	    s_ann->has_volatile_ops = true;
+ 	}
+     }
+   else if (TREE_CODE (ptr) == INTEGER_CST)
+     {
+       /* If a constant is used as a pointer, we can't generate a real
+ 	 operand for it but we mark the statement volatile to prevent
+ 	 optimizations from messing things up.  */
+       if (s_ann)
+ 	s_ann->has_volatile_ops = true;
+       return;
+     }
+   else
+     {
+       /* Ok, this isn't even is_gimple_min_invariant.  Something's broke.  */
+       gcc_unreachable ();
+     }
+ 
+   /* If requested, add a USE operand for the base pointer.  */
+   if (recurse_on_base)
+     get_expr_operands (stmt, pptr, opf_use);
+ }
+
+
+/* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
+
+ static void
+ get_tmr_operands (tree stmt, tree expr, int flags)
+ {
+   tree tag, ref;
+   HOST_WIDE_INT offset, size, maxsize;
+   subvar_t svars, sv;
+   stmt_ann_t s_ann = stmt_ann (stmt);
+ 
+   /* This statement references memory.  Use 'true', for consistency
+      with every other store to this boolean flag in this file.  */
+   s_ann->references_memory = true;
+ 
+   /* First record the real operands.  */
+   get_expr_operands (stmt, &TMR_BASE (expr), opf_use);
+   get_expr_operands (stmt, &TMR_INDEX (expr), opf_use);
+ 
+   /* The symbol, if any, has its address taken by this reference.  */
+   if (TMR_SYMBOL (expr))
+     add_to_addressable_set (TMR_SYMBOL (expr), &s_ann->addresses_taken);
+ 
+   tag = TMR_TAG (expr);
+   if (!tag)
+     {
+       /* Something weird, so ensure that we will be careful.  */
+       s_ann->has_volatile_ops = true;
+       return;
+     }
+ 
+   if (DECL_P (tag))
+     {
+       get_expr_operands (stmt, &tag, flags);
+       return;
+     }
+ 
+   /* TAG is a reference into a structure: add a virtual operand for
+      every subvariable of its base that the access may overlap.  */
+   ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
+   gcc_assert (ref != NULL_TREE);
+   svars = get_subvars_for_var (ref);
+   for (sv = svars; sv; sv = sv->next)
+     {
+       bool exact;
+ 
+       if (overlap_subvar (offset, maxsize, sv->var, &exact))
+ 	add_stmt_operand (&sv->var, s_ann, flags);
+     }
+ }
+
+
+/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
+ clobbered variables in the function. */
+
+ static void
+ add_call_clobber_ops (tree stmt, tree callee)
+ {
+   unsigned u;
+   bitmap_iterator bi;
+   stmt_ann_t s_ann = stmt_ann (stmt);
+   bitmap not_read_b, not_written_b;
+ 
+   /* Functions that are not const, pure or never return may clobber
+      call-clobbered variables.  */
+   if (s_ann)
+     s_ann->makes_clobbering_call = true;
+ 
+   /* If we created .GLOBAL_VAR earlier, just use it.  A single VDEF of
+      .GLOBAL_VAR stands for clobbering every call-clobbered symbol.  */
+   if (gimple_global_var (cfun))
+     {
+       tree var = gimple_global_var (cfun);
+       add_virtual_operand (var, s_ann, opf_def, NULL, 0, -1, true);
+       return;
+     }
+ 
+   /* Get info for local and module level statics.  There is a bit
+      set for each static if the call being processed does not read
+      or write that variable.  */
+   not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
+   not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
+ 
+   /* Add a VDEF operand for every call clobbered variable.  */
+   EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
+     {
+       tree var = referenced_var_lookup (u);
+       unsigned int escape_mask = var_ann (var)->escape_mask;
+       tree real_var = var;
+       bool not_read;
+       bool not_written;
+ 
+       /* Not read and not written are computed on regular vars, not
+ 	 subvars, so look at the parent var if this is an SFT. */
+       if (TREE_CODE (var) == STRUCT_FIELD_TAG)
+ 	real_var = SFT_PARENT_VAR (var);
+ 
+       not_read = not_read_b
+ 	         ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
+ 	         : false;
+ 
+       not_written = not_written_b
+ 	            ? bitmap_bit_p (not_written_b, DECL_UID (real_var))
+ 		    : false;
+       gcc_assert (!unmodifiable_var_p (var));
+ 
+       clobber_stats.clobbered_vars++;
+ 
+       /* See if this variable is really clobbered by this function.  */
+ 
+       /* Trivial case: Things escaping only to pure/const are not
+ 	 clobbered by non-pure-const, and only read by pure/const. */
+       if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
+ 	{
+ 	  tree call = get_call_expr_in (stmt);
+ 	  if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
+ 	    {
+ 	      /* This call is pure/const, so it may read VAR: add a
+ 		 VUSE rather than a VDEF.  */
+ 	      add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
+ 	      clobber_stats.unescapable_clobbers_avoided++;
+ 	      continue;
+ 	    }
+ 	  else
+ 	    {
+ 	      /* VAR escapes only to pure/const calls and this call is
+ 		 neither, so this call cannot touch VAR at all.  */
+ 	      clobber_stats.unescapable_clobbers_avoided++;
+ 	      continue;
+ 	    }
+ 	}
+ 
+       /* IPA says the callee never writes this static: downgrade the
+ 	 VDEF to a VUSE, or drop it entirely if it is not read either.  */
+       if (not_written)
+ 	{
+ 	  clobber_stats.static_write_clobbers_avoided++;
+ 	  if (!not_read)
+ 	    add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
+ 	  else
+ 	    clobber_stats.static_read_clobbers_avoided++;
+ 	}
+       else
+ 	add_virtual_operand (var, s_ann, opf_def, NULL, 0, -1, true);
+     }
+ }
+
+
+/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
+ function. */
+
+ static void
+ add_call_read_ops (tree stmt, tree callee)
+ {
+   unsigned u;
+   bitmap_iterator bi;
+   stmt_ann_t s_ann = stmt_ann (stmt);
+   bitmap not_read_b;
+ 
+   /* If the function is not pure, it may reference memory.  Add
+      a VUSE for .GLOBAL_VAR if it has been created.  See add_referenced_var
+      for the heuristic used to decide whether to create .GLOBAL_VAR.  */
+   if (gimple_global_var (cfun))
+     {
+       tree var = gimple_global_var (cfun);
+       add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
+       return;
+     }
+ 
+   /* IPA bitmap of module-level statics the callee never reads, if
+      the callee is known; NULL means no such information.  */
+   not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
+ 
+   /* Add a VUSE for each call-clobbered variable.  */
+   EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
+     {
+       tree var = referenced_var (u);
+       tree real_var = var;
+       bool not_read;
+ 
+       clobber_stats.readonly_clobbers++;
+ 
+       /* Not read and not written are computed on regular vars, not
+ 	 subvars, so look at the parent var if this is an SFT. */
+ 
+       if (TREE_CODE (var) == STRUCT_FIELD_TAG)
+ 	real_var = SFT_PARENT_VAR (var);
+ 
+       not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
+ 	                    : false;
+ 
+       /* Skip statics that IPA proved the callee never reads.  */
+       if (not_read)
+ 	{
+ 	  clobber_stats.static_readonly_clobbers_avoided++;
+ 	  continue;
+ 	}
+ 
+       /* The call may read VAR; record a VUSE.  */
+       add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
+     }
+ }
+
+
+/* A subroutine of get_expr_operands to handle CALL_EXPR. */
+
+ static void
+ get_call_expr_operands (tree stmt, tree expr)
+ {
+   int call_flags = call_expr_flags (expr);
+   int i, nargs;
+   stmt_ann_t ann = stmt_ann (stmt);
+ 
+   /* Calls reference memory.  */
+   ann->references_memory = true;
+ 
+   /* If aliases have been computed already, add VDEF or VUSE
+      operands for all the symbols that have been found to be
+      call-clobbered.  */
+   if (gimple_aliases_computed_p (cfun)
+       && !(call_flags & ECF_NOVOPS))
+     {
+       /* A 'pure' or a 'const' function never call-clobbers anything.
+ 	 A 'noreturn' function might, but since we don't return anyway
+ 	 there is no point in recording that.  */
+       if (TREE_SIDE_EFFECTS (expr)
+ 	  && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
+ 	add_call_clobber_ops (stmt, get_callee_fndecl (expr));
+       else if (!(call_flags & ECF_CONST))
+ 	/* Pure (but not const) calls still read memory.  */
+ 	add_call_read_ops (stmt, get_callee_fndecl (expr));
+     }
+ 
+   /* Find uses in the called function.  */
+   get_expr_operands (stmt, &CALL_EXPR_FN (expr), opf_use);
+   nargs = call_expr_nargs (expr);
+   for (i = 0; i < nargs; i++)
+     get_expr_operands (stmt, &CALL_EXPR_ARG (expr, i), opf_use);
+ 
+   /* Also scan the static chain operand, if any.  */
+   get_expr_operands (stmt, &CALL_EXPR_STATIC_CHAIN (expr), opf_use);
+ }
+
+
+ /* Scan operands in the ASM_EXPR statement STMT.  */
+
+static void
+get_asm_expr_operands (tree stmt)
+{
+ stmt_ann_t s_ann;
+ int i, noutputs;
+ const char **oconstraints;
+ const char *constraint;
+ bool allows_mem, allows_reg, is_inout;
+ tree link;
+
+ s_ann = stmt_ann (stmt);
+ noutputs = list_length (ASM_OUTPUTS (stmt));
+ oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
+
+ /* Gather all output operands. */
+ for (i = 0, link = ASM_OUTPUTS (stmt); link; i++, link = TREE_CHAIN (link))
+ {
+ constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
+ oconstraints[i] = constraint;
+ parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
+ &allows_reg, &is_inout);
+
+ /* This should have been split in gimplify_asm_expr. */
+ gcc_assert (!allows_reg || !is_inout);
+
+ /* Memory operands are addressable. Note that STMT needs the
+ address of this operand. */
+ if (!allows_reg && allows_mem)
+ {
+ tree t = get_base_address (TREE_VALUE (link));
+ if (t && DECL_P (t) && s_ann)
+ add_to_addressable_set (t, &s_ann->addresses_taken);
+ }
+
+ get_expr_operands (stmt, &TREE_VALUE (link), opf_def);
+ }
+
+ /* Gather all input operands. */
+ for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
+ {
+ constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
+ parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
+ &allows_mem, &allows_reg);
+
+ /* Memory operands are addressable. Note that STMT needs the
+ address of this operand. */
+ if (!allows_reg && allows_mem)
+ {
+ tree t = get_base_address (TREE_VALUE (link));
+ if (t && DECL_P (t) && s_ann)
+ add_to_addressable_set (t, &s_ann->addresses_taken);
+ }
+
+ get_expr_operands (stmt, &TREE_VALUE (link), 0);
+ }
+
+ /* Clobber all memory and addressable symbols for asm ("" : : : "memory"); */
+ for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
+ if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
+ {
+ unsigned i;
+ bitmap_iterator bi;
+
+ s_ann->references_memory = true;
+
+ EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
+ {
+ tree var = referenced_var (i);
+ add_stmt_operand (&var, s_ann, opf_def | opf_implicit);