#include "flags.h"
#include "function.h"
#include "diagnostic.h"
-#include "errors.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
+#include "toplev.h"
#include "langhooks.h"
VUSE for 'b'. */
#define opf_no_vops (1 << 2)
+/* Operand is a "non-specific" kill for call-clobbers and such. This is used
+ to distinguish "reset the world" events from explicit MODIFY_EXPRs. */
+#define opf_non_specific (1 << 3)
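+/* For example, the call-clobber handling later in this patch adds
+     add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific)
+   for each call-clobbered variable, whereas the operand of an explicit
+   store arrives with plain opf_is_def; only the former, non-specific
+   V_MAY_DEFs may later be relaxed to VUSEs for read-only variables.  */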
+
/* This structure maintains a sorted list of operands which is created by
parse_ssa_operand. */
struct opbuild_list_d GTY (())
bool ssa_ro_call_cache_valid;
/* These arrays are the cached operand vectors for call clobbered calls. */
-static GTY (()) varray_type clobbered_v_may_defs;
-static GTY (()) varray_type clobbered_vuses;
-static GTY (()) varray_type ro_call_vuses;
+static VEC(tree,heap) *clobbered_v_may_defs;
+static VEC(tree,heap) *clobbered_vuses;
+static VEC(tree,heap) *ro_call_vuses;
static bool clobbered_aliased_loads;
static bool clobbered_aliased_stores;
static bool ro_call_aliased_loads;
static void get_expr_operands (tree, tree *, int);
static void get_asm_expr_operands (tree);
static void get_indirect_ref_operands (tree, tree, int);
+static void get_tmr_operands (tree, tree, int);
static void get_call_expr_operands (tree, tree);
static inline void append_def (tree *);
static inline void append_use (tree *);
ggc_free (ptr);
}
- if (clobbered_v_may_defs)
- {
- ggc_free (clobbered_v_may_defs);
- ggc_free (clobbered_vuses);
- clobbered_v_may_defs = NULL;
- clobbered_vuses = NULL;
- }
- if (ro_call_vuses)
- {
- ggc_free (ro_call_vuses);
- ro_call_vuses = NULL;
- }
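+ /* VEC_free is expected to clear the vector pointers as well, so no
+    explicit NULL assignments are needed here.  */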
+ VEC_free (tree, heap, clobbered_v_may_defs);
+ VEC_free (tree, heap, clobbered_vuses);
+ VEC_free (tree, heap, ro_call_vuses);
ops_active = false;
}
get_indirect_ref_operands (stmt, expr, flags);
return;
+ case TARGET_MEM_REF:
+ get_tmr_operands (stmt, expr, flags);
+ return;
+
case ARRAY_REF:
case ARRAY_RANGE_REF:
/* Treat array references as references to the virtual variable
bool exact;
if (overlap_subvar (offset, size, sv, &exact))
{
- if (exact)
+ if (!exact)
flags &= ~opf_kill_def;
add_stmt_operand (&sv->var, s_ann, flags);
}
EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
{
tree var = referenced_var (i);
- add_stmt_operand (&var, s_ann, opf_is_def);
+ add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
}
/* Now clobber all addressables. */
&& get_subvars_for_var (var) != NULL)
continue;
- add_stmt_operand (&var, s_ann, opf_is_def);
+ add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
}
break;
get_expr_operands (stmt, pptr, opf_none);
}
+/* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
+
+static void
+get_tmr_operands (tree stmt, tree expr, int flags)
+{
+ tree tag = TMR_TAG (expr);
+
+ /* First record the real operands. */
+ get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
+ get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);
+
+ /* MEM_REFs should never be killing. */
+ flags &= ~opf_kill_def;
+
+ if (TMR_SYMBOL (expr))
+ note_addressable (TMR_SYMBOL (expr), stmt_ann (stmt));
+
+ if (tag)
+ add_stmt_operand (&tag, stmt_ann (stmt), flags);
+ else
+ /* Something weird, so ensure that we will be careful. */
+ stmt_ann (stmt)->has_volatile_ops = true;
+}
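+
+/* Roughly: for a reference of the form MEM [symbol: a, base: p, index: i],
+   the function above records plain uses of the base and index operands,
+   notes the address of the symbol as taken, and adds the tag as the
+   (never killing) virtual operand for the memory access itself.  */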
+
/* A subroutine of get_expr_operands to handle CALL_EXPR. */
static void
/* If the variable cannot be modified and this is a V_MAY_DEF change
it into a VUSE. This happens when read-only variables are marked
- call-clobbered and/or aliased to writeable variables. So we only
- check that this only happens on stores, and not writes to GIMPLE
- registers.
-
- FIXME: The C++ FE is emitting assignments in the IL stream for
- read-only globals. This is wrong, but for the time being disable
- this transformation on V_MUST_DEF operands (otherwise, we
- mis-optimize SPEC2000's eon). */
- if ((flags & opf_is_def)
- && !(flags & opf_kill_def)
- && unmodifiable_var_p (var))
+ call-clobbered and/or aliased to writable variables. So we only
+ make this change for non-specific stores.
+
+ Note that if this is a specific store, i.e. associated with a
+ modify_expr, then we can't suppress the V_DEF, lest we run into
+ validation problems.
+
+ This can happen when programs cast away const, leaving us with a
+ store to read-only memory. If the statement is actually executed
+ at runtime, then the program is ill formed. If the statement is
+ not executed then all is well. At the very least, we cannot ICE. */
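+ /* For instance (illustrative only), casting away const as in
+
+      static const int x = 42;
+      *(int *) &x = 0;
+
+    leaves a specific store to read-only memory; its V_DEF is kept,
+    while only the non-specific operands added for call clobbers are
+    relaxed to VUSEs below.  */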
+ if ((flags & opf_non_specific) && unmodifiable_var_p (var))
{
gcc_assert (!is_real_op);
- flags &= ~opf_is_def;
+ flags &= ~(opf_is_def | opf_kill_def);
}
if (is_real_op)
static void
note_addressable (tree var, stmt_ann_t s_ann)
{
- tree ref;
subvar_t svars;
- HOST_WIDE_INT offset;
- HOST_WIDE_INT size;
if (!s_ann)
return;
- /* If this is a COMPONENT_REF, and we know exactly what it touches, we only
- take the address of the subvariables it will touch.
- Otherwise, we take the address of all the subvariables, plus the real
- ones. */
-
- if (var && TREE_CODE (var) == COMPONENT_REF
- && (ref = okay_component_ref_for_subvars (var, &offset, &size)))
- {
- subvar_t sv;
- svars = get_subvars_for_var (ref);
-
- if (s_ann->addresses_taken == NULL)
- s_ann->addresses_taken = BITMAP_GGC_ALLOC ();
-
- for (sv = svars; sv; sv = sv->next)
- {
- if (overlap_subvar (offset, size, sv, NULL))
- bitmap_set_bit (s_ann->addresses_taken, var_ann (sv->var)->uid);
- }
- return;
- }
+ /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
+ as the only thing we take the address of.
+ See PR 21407 and the ensuing mailing list discussion. */
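+ /* For instance, given "ptr = &s.b", we now mark the whole base
+    variable 's' (via get_base_address below) as having its address
+    taken, rather than only the subvariables overlapping 's.b', as
+    before.  */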
var = get_base_address (var);
if (var && SSA_VAR_P (var))
{
/* Process the caches in reverse order so we are always inserting at
the head of the list. */
- for (i = VARRAY_ACTIVE_SIZE (clobbered_vuses) - 1; i >=0; i--)
+ for (i = VEC_length (tree, clobbered_vuses) - 1; i >= 0; i--)
{
- t = VARRAY_TREE (clobbered_vuses, i);
+ t = VEC_index (tree, clobbered_vuses, i);
gcc_assert (TREE_CODE (t) != SSA_NAME);
var_ann (t)->in_vuse_list = 1;
opbuild_append_virtual (&build_vuses, t);
}
- for (i = VARRAY_ACTIVE_SIZE (clobbered_v_may_defs) - 1; i >= 0; i--)
+ for (i = VEC_length (tree, clobbered_v_may_defs) - 1; i >= 0; i--)
{
- t = VARRAY_TREE (clobbered_v_may_defs, i);
+ t = VEC_index (tree, clobbered_v_may_defs, i);
gcc_assert (TREE_CODE (t) != SSA_NAME);
var_ann (t)->in_v_may_def_list = 1;
opbuild_append_virtual (&build_v_may_defs, t);
if (unmodifiable_var_p (var))
add_stmt_operand (&var, &empty_ann, opf_none);
else
- add_stmt_operand (&var, &empty_ann, opf_is_def);
+ add_stmt_operand (&var, &empty_ann, opf_is_def | opf_non_specific);
}
clobbered_aliased_loads = empty_ann.makes_aliased_loads;
}
/* Prepare empty cache vectors. */
- if (clobbered_v_may_defs)
- {
- VARRAY_POP_ALL (clobbered_vuses);
- VARRAY_POP_ALL (clobbered_v_may_defs);
- }
- else
- {
- VARRAY_TREE_INIT (clobbered_v_may_defs, 10, "clobbered_v_may_defs");
- VARRAY_TREE_INIT (clobbered_vuses, 10, "clobbered_vuses");
- }
+ VEC_truncate (tree, clobbered_vuses, 0);
+ VEC_truncate (tree, clobbered_v_may_defs, 0);
/* Now fill the clobbered cache with the values that have been found. */
for (i = opbuild_first (&build_vuses);
i != OPBUILD_LAST;
i = opbuild_next (&build_vuses, i))
- VARRAY_PUSH_TREE (clobbered_vuses, opbuild_elem_virtual (&build_vuses, i));
+ VEC_safe_push (tree, heap, clobbered_vuses,
+ opbuild_elem_virtual (&build_vuses, i));
gcc_assert (opbuild_num_elems (&build_vuses)
- == VARRAY_ACTIVE_SIZE (clobbered_vuses));
+ == VEC_length (tree, clobbered_vuses));
for (i = opbuild_first (&build_v_may_defs);
i != OPBUILD_LAST;
i = opbuild_next (&build_v_may_defs, i))
- VARRAY_PUSH_TREE (clobbered_v_may_defs,
- opbuild_elem_virtual (&build_v_may_defs, i));
+ VEC_safe_push (tree, heap, clobbered_v_may_defs,
+ opbuild_elem_virtual (&build_v_may_defs, i));
gcc_assert (opbuild_num_elems (&build_v_may_defs)
- == VARRAY_ACTIVE_SIZE (clobbered_v_may_defs));
+ == VEC_length (tree, clobbered_v_may_defs));
ssa_call_clobbered_cache_valid = true;
}
/* If cache is valid, copy the elements into the build vector. */
if (ssa_ro_call_cache_valid)
{
- for (i = VARRAY_ACTIVE_SIZE (ro_call_vuses) - 1; i >=0 ; i--)
+ for (i = VEC_length (tree, ro_call_vuses) - 1; i >= 0; i--)
{
/* Process the caches in reverse order so we are always inserting at
the head of the list. */
- t = VARRAY_TREE (ro_call_vuses, i);
+ t = VEC_index (tree, ro_call_vuses, i);
gcc_assert (TREE_CODE (t) != SSA_NAME);
var_ann (t)->in_vuse_list = 1;
opbuild_append_virtual (&build_vuses, t);
EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
{
tree var = referenced_var (u);
- add_stmt_operand (&var, &empty_ann, opf_none);
+ add_stmt_operand (&var, &empty_ann, opf_none | opf_non_specific);
}
ro_call_aliased_loads = empty_ann.makes_aliased_loads;
s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
/* Prepare empty cache vectors. */
- if (ro_call_vuses)
- VARRAY_POP_ALL (ro_call_vuses);
- else
- VARRAY_TREE_INIT (ro_call_vuses, 10, "ro_call_vuses");
-
+ VEC_truncate (tree, ro_call_vuses, 0);
/* Now fill the clobbered cache with the values that have been found. */
for (i = opbuild_first (&build_vuses);
i != OPBUILD_LAST;
i = opbuild_next (&build_vuses, i))
- VARRAY_PUSH_TREE (ro_call_vuses, opbuild_elem_virtual (&build_vuses, i));
+ VEC_safe_push (tree, heap, ro_call_vuses,
+ opbuild_elem_virtual (&build_vuses, i));
gcc_assert (opbuild_num_elems (&build_vuses)
- == VARRAY_ACTIVE_SIZE (ro_call_vuses));
+ == VEC_length (tree, ro_call_vuses));
ssa_ro_call_cache_valid = true;
}