/* SSA operands management for trees.
- Copyright (C) 2003 Free Software Foundation, Inc.
+ Copyright (C) 2003, 2004 Free Software Foundation, Inc.
This file is part of GCC.
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
-#include "cgraph.h"
#include "langhooks.h"
variable, and that same variable occurs in the same operands cache, then
the new cache vector will also get the same SSA_NAME.
- ie, if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
+ i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
vector for VUSE, then the new vector will also be modified such that
it contains 'a_5' rather than 'a'.
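[Editorial aside: the correlation described above is easy to see in miniature. The stand-alone sketch below uses illustrative types, not GCC's tree/varray machinery: the old operand vector carries SSA versions, the rebuilt vector starts from bare variables, and matching bases inherit the old version.]

    #include <stdio.h>
    #include <string.h>

    /* An SSA operand: base variable plus version, so "a_5" is { "a", 5 }.  */
    struct ssa_op { const char *base; int version; };

    /* Copy versions from OLDV into NEWV wherever the base variable matches,
       so a stmt that had a VUSE of a_5 keeps a_5 rather than bare a.  */
    static void
    correlate (struct ssa_op *newv, unsigned n_new,
               const struct ssa_op *oldv, unsigned n_old)
    {
      unsigned i, j;
      for (i = 0; i < n_new; i++)
        for (j = 0; j < n_old; j++)
          if (strcmp (newv[i].base, oldv[j].base) == 0)
            {
              newv[i].version = oldv[j].version;
              break;
            }
    }

    int
    main (void)
    {
      struct ssa_op oldv[1] = { { "a", 5 } };
      struct ssa_op newv[2] = { { "a", 0 }, { "b", 0 } };

      correlate (newv, 2, oldv, 1);
      printf ("%s_%d %s_%d\n", newv[0].base, newv[0].version,
              newv[1].base, newv[1].version);   /* prints: a_5 b_0 */
      return 0;
    }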
/* Array for building all the v_must_def operands. */
static GTY (()) varray_type build_v_must_defs;
+/* True if the operands for call clobbered vars are cached and valid. */
+bool ssa_call_clobbered_cache_valid;
+
+/* True if the cached operands for pure/const (read-only) calls are valid. */
+bool ssa_ro_call_cache_valid;
+
+/* These arrays are the cached operand vectors for calls that clobber
+ memory and for pure (read-only) calls. */
+static GTY (()) varray_type clobbered_v_may_defs;
+static GTY (()) varray_type clobbered_vuses;
+static GTY (()) varray_type ro_call_vuses;
+static bool clobbered_aliased_loads;
+static bool clobbered_aliased_stores;
+static bool ro_call_aliased_loads;
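[Editorial aside: the flags and varrays above implement a compute-once cache for call operands. A minimal sketch of the protocol they follow, with hypothetical names (compute_ops stands in for the bitmap walk over call_clobbered_vars): the first caller computes and fills the cache, later callers copy it, and the valid flag must be cleared whenever the underlying variable set changes.]

    #include <stdbool.h>
    #include <string.h>

    #define CACHE_MAX 64

    static int cached_ops[CACHE_MAX];
    static unsigned cached_n;
    static bool cache_valid;     /* role of ssa_call_clobbered_cache_valid */

    /* Stand-in for the expensive walk over call_clobbered_vars.  */
    static unsigned
    compute_ops (int *out)
    {
      out[0] = 1;
      out[1] = 2;
      return 2;
    }

    /* First call computes and caches; subsequent calls just copy.  */
    static unsigned
    get_call_ops (int *out)
    {
      if (!cache_valid)
        {
          cached_n = compute_ops (cached_ops);
          cache_valid = true;
        }
      memcpy (out, cached_ops, cached_n * sizeof *out);
      return cached_n;
    }

    /* Must run whenever the set of call clobbered variables changes.  */
    static void
    invalidate_cache (void)
    {
      cache_valid = false;
    }

    int
    main (void)
    {
      int ops[CACHE_MAX];
      unsigned n = get_call_ops (ops);  /* computes and fills the cache */
      n = get_call_ops (ops);           /* served from the cache        */
      invalidate_cache ();              /* e.g. after aliasing changes  */
      return (int) (n - 2);             /* 0 on success                 */
    }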
#ifdef ENABLE_CHECKING
/* Used to make sure operand construction is working on the proper stmt. */
static inline void append_use (tree *);
static void append_v_may_def (tree);
static void append_v_must_def (tree);
-static void add_call_clobber_ops (tree, tree);
-static void add_call_read_ops (tree, tree);
-static void add_stmt_operand (tree *, tree, int);
+static void add_call_clobber_ops (tree);
+static void add_call_read_ops (tree);
+static void add_stmt_operand (tree *, stmt_ann_t, int);
/* Return a vector of contiguous memory for NUM def operands. */
v_may_def_optype v_may_def_ops;
unsigned size;
size = sizeof (struct v_may_def_optype_d)
- + sizeof (v_may_def_operand_type_t) * (num - 1);
+ + sizeof (v_def_use_operand_type_t) * (num - 1);
v_may_def_ops = ggc_alloc (size);
v_may_def_ops->num_v_may_defs = num;
return v_may_def_ops;
{
v_must_def_optype v_must_def_ops;
unsigned size;
- size = sizeof (struct v_must_def_optype_d) + sizeof (tree) * (num - 1);
+ size = sizeof (struct v_must_def_optype_d)
+ + sizeof (v_def_use_operand_type_t) * (num - 1);
v_must_def_ops = ggc_alloc (size);
v_must_def_ops->num_v_must_defs = num;
return v_must_def_ops;
build_v_may_defs = NULL;
build_vuses = NULL;
build_v_must_defs = NULL;
+ if (clobbered_v_may_defs)
+ {
+ ggc_free (clobbered_v_may_defs);
+ ggc_free (clobbered_vuses);
+ clobbered_v_may_defs = NULL;
+ clobbered_vuses = NULL;
+ }
+ if (ro_call_vuses)
+ {
+ ggc_free (ro_call_vuses);
+ ro_call_vuses = NULL;
+ }
}
}
+/* Clear the in_list bits and empty the build array for v_may_defs. */
+
+static inline void
+cleanup_v_may_defs (void)
+{
+ unsigned x, num;
+ num = VARRAY_ACTIVE_SIZE (build_v_may_defs);
+
+ for (x = 0; x < num; x++)
+ {
+ tree t = VARRAY_TREE (build_v_may_defs, x);
+ var_ann_t ann = var_ann (t);
+ ann->in_v_may_def_list = 0;
+ }
+ VARRAY_POP_ALL (build_v_may_defs);
+}
+
/* Return a new vuse operand vector, comparing to OLD_OPS_P. */
static vuse_optype
num = VARRAY_ACTIVE_SIZE (build_vuses);
if (num == 0)
{
- VARRAY_POP_ALL (build_v_may_defs);
+ cleanup_v_may_defs ();
return NULL;
}
if (num_v_may_defs > 0)
{
- size_t i, j;
+ size_t i;
tree vuse;
for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
{
vuse = VARRAY_TREE (build_vuses, i);
- for (j = 0; j < num_v_may_defs; j++)
- {
- if (vuse == VARRAY_TREE (build_v_may_defs, j))
- break;
- }
-
- /* If we found a useless VUSE operand, remove it from the
- operand array by replacing it with the last active element
- in the operand array (unless the useless VUSE was the
- last operand, in which case we simply remove it. */
- if (j != num_v_may_defs)
+ if (TREE_CODE (vuse) != SSA_NAME)
{
- if (i != VARRAY_ACTIVE_SIZE (build_vuses) - 1)
- {
- VARRAY_TREE (build_vuses, i)
- = VARRAY_TREE (build_vuses,
- VARRAY_ACTIVE_SIZE (build_vuses) - 1);
+ var_ann_t ann = var_ann (vuse);
+ ann->in_vuse_list = 0;
+ if (ann->in_v_may_def_list)
+ {
+ /* If we found a useless VUSE operand, remove it from the
+ operand array by replacing it with the last active element
+ in the operand array (unless the useless VUSE was the
+ last operand, in which case we simply remove it). */
+ if (i != VARRAY_ACTIVE_SIZE (build_vuses) - 1)
+ {
+ VARRAY_TREE (build_vuses, i)
+ = VARRAY_TREE (build_vuses,
+ VARRAY_ACTIVE_SIZE (build_vuses) - 1);
+ }
+ VARRAY_POP (build_vuses);
+
+ /* We want to rescan the element at this index, unless
+ this was the last element, in which case the loop
+ terminates. */
+ i--;
}
- VARRAY_POP (build_vuses);
-
- /* We want to rescan the element at this index, unless
- this was the last element, in which case the loop
- terminates. */
- i--;
}
}
}
+ else
+ /* Clear out the in_list bits. */
+ for (x = 0; x < num; x++)
+ {
+ tree t = VARRAY_TREE (build_vuses, x);
+ if (TREE_CODE (t) != SSA_NAME)
+ {
+ var_ann_t ann = var_ann (t);
+ ann->in_vuse_list = 0;
+ }
+ }
+
num = VARRAY_ACTIVE_SIZE (build_vuses);
/* The pruning above may have reduced the size to zero. */
if (num == 0)
{
- VARRAY_POP_ALL (build_v_may_defs);
+ cleanup_v_may_defs ();
return NULL;
}
/* The v_may_def build vector wasn't freed because we needed it here.
Free it now with the vuses build vector. */
VARRAY_POP_ALL (build_vuses);
- VARRAY_POP_ALL (build_v_may_defs);
+ cleanup_v_may_defs ();
return vuse_ops;
}
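[Editorial aside: the pruning loop above uses the standard unordered-delete idiom: overwrite the dead slot with the last element, shrink, and re-test the same index. A minimal stand-alone version, with plain ints standing in for the varray of trees:]

    #include <stdio.h>

    /* Delete V[I] without preserving order: move the last element into
       slot I and shrink; the caller must then re-test index I.  */
    static void
    remove_unordered (int *v, unsigned *n, unsigned i)
    {
      v[i] = v[--*n];
    }

    int
    main (void)
    {
      int v[4] = { 3, 4, 8, 6 };
      unsigned n = 4, i = 0;

      while (i < n)
        {
          if (v[i] % 2 == 0)             /* stand-in for "useless VUSE"  */
            remove_unordered (v, &n, i); /* stay at I: new element there */
          else
            i++;
        }
      printf ("%u element(s) left: %d\n", n, v[0]);  /* 1 element(s) left: 3 */
      return 0;
    }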
build_diff = false;
for (x = 0; x < num; x++)
{
- tree var = old_ops->v_must_defs[x];
+ tree var = old_ops->v_must_defs[x].def;
if (TREE_CODE (var) == SSA_NAME)
var = SSA_NAME_VAR (var);
if (var != VARRAY_TREE (build_v_must_defs, x))
/* Look for VAR in the original vector. */
for (i = 0; i < old_num; i++)
{
- result = old_ops->v_must_defs[i];
+ result = old_ops->v_must_defs[i].def;
if (TREE_CODE (result) == SSA_NAME)
result = SSA_NAME_VAR (result);
if (result == var)
{
- v_must_def_ops->v_must_defs[x] = old_ops->v_must_defs[i];
+ v_must_def_ops->v_must_defs[x].def = old_ops->v_must_defs[i].def;
+ v_must_def_ops->v_must_defs[x].use = old_ops->v_must_defs[i].use;
break;
}
}
if (i == old_num)
- v_must_def_ops->v_must_defs[x] = var;
+ {
+ v_must_def_ops->v_must_defs[x].def = var;
+ v_must_def_ops->v_must_defs[x].use = var;
+ }
}
}
VARRAY_POP_ALL (build_v_must_defs);
static inline void
append_v_may_def (tree var)
{
- unsigned i;
+ var_ann_t ann = get_var_ann (var);
/* Don't allow duplicate entries. */
- for (i = 0; i < VARRAY_ACTIVE_SIZE (build_v_may_defs); i++)
- if (var == VARRAY_TREE (build_v_may_defs, i))
- return;
+ if (ann->in_v_may_def_list)
+ return;
+ ann->in_v_may_def_list = 1;
VARRAY_PUSH_TREE (build_v_may_defs, var);
}
static inline void
append_vuse (tree var)
{
- size_t i;
/* Don't allow duplicate entries. */
- for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
- if (var == VARRAY_TREE (build_vuses, i))
- return;
+ if (TREE_CODE (var) != SSA_NAME)
+ {
+ var_ann_t ann = get_var_ann (var);
+
+ if (ann->in_vuse_list || ann->in_v_may_def_list)
+ return;
+ ann->in_vuse_list = 1;
+ }
VARRAY_PUSH_TREE (build_vuses, var);
}
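[Editorial aside: both append routines now reject duplicates with a per-variable bit instead of scanning the build array, making each append O(1); the bits are cleared again when the build arrays are emptied (see cleanup_v_may_defs above). A stand-alone sketch of the pattern, with hypothetical types in place of GCC's var_ann and varray:]

    #include <stdbool.h>
    #include <stdio.h>

    /* Stand-in for a variable annotation carrying an in_list bit.  */
    struct operand_slot
    {
      const char *name;
      bool in_list;           /* mirrors ann->in_v_may_def_list etc.  */
    };

    #define MAX_OPS 16
    static struct operand_slot *build_list[MAX_OPS];
    static unsigned n_ops;

    /* O(1) duplicate check: test-and-set the per-variable bit instead of
       walking the build list.  */
    static void
    append_op (struct operand_slot *op)
    {
      if (op->in_list)
        return;
      op->in_list = true;
      build_list[n_ops++] = op;
    }

    /* The bits must be cleared whenever the list is emptied, exactly as
       cleanup_v_may_defs does in the patch.  */
    static void
    cleanup_ops (void)
    {
      unsigned i;
      for (i = 0; i < n_ops; i++)
        build_list[i]->in_list = false;
      n_ops = 0;
    }

    int
    main (void)
    {
      struct operand_slot a = { "a", false }, b = { "b", false };

      append_op (&a);
      append_op (&b);
      append_op (&a);                    /* duplicate: ignored */
      printf ("%u operands\n", n_ops);   /* prints: 2 operands */
      cleanup_ops ();
      return 0;
    }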
<* compare old_ops_copy and new_ops *>
free_ssa_operands (old_ops); */
-void
+static void
build_ssa_operands (tree stmt, stmt_ann_t ann, stmt_operands_p old_ops,
stmt_operands_p new_ops)
{
switch (code)
{
case MODIFY_EXPR:
- get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);
- if (TREE_CODE (TREE_OPERAND (stmt, 0)) == ARRAY_REF
- || TREE_CODE (TREE_OPERAND (stmt, 0)) == ARRAY_RANGE_REF
- || TREE_CODE (TREE_OPERAND (stmt, 0)) == COMPONENT_REF
- || TREE_CODE (TREE_OPERAND (stmt, 0)) == REALPART_EXPR
- || TREE_CODE (TREE_OPERAND (stmt, 0)) == IMAGPART_EXPR
- /* Use a V_MAY_DEF if the RHS might throw, as the LHS won't be
- modified in that case. FIXME we should represent somehow
- that it is killed on the fallthrough path. */
- || tree_could_throw_p (TREE_OPERAND (stmt, 1)))
- get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_is_def);
- else
- get_expr_operands (stmt, &TREE_OPERAND (stmt, 0),
- opf_is_def | opf_kill_def);
+ /* First get operands from the RHS. For the LHS, we use a V_MAY_DEF if
+ either only part of the LHS is modified or the RHS might throw;
+ otherwise we use a V_MUST_DEF.
+
+ ??? If it might throw, we should represent somehow that it is killed
+ on the fallthrough path. */
+ {
+ tree lhs = TREE_OPERAND (stmt, 0);
+ int lhs_flags = opf_is_def;
+
+ get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);
+
+ /* A VIEW_CONVERT_EXPR on the LHS doesn't change whether or not the
+ entire LHS is modified; that depends on what's inside the
+ VIEW_CONVERT_EXPR, so look through it. */
+ if (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
+ lhs = TREE_OPERAND (lhs, 0);
+
+ if (TREE_CODE (lhs) != ARRAY_REF && TREE_CODE (lhs) != ARRAY_RANGE_REF
+ && TREE_CODE (lhs) != COMPONENT_REF
+ && TREE_CODE (lhs) != BIT_FIELD_REF
+ && TREE_CODE (lhs) != REALPART_EXPR
+ && TREE_CODE (lhs) != IMAGPART_EXPR)
+ lhs_flags |= opf_kill_def;
+
+ get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), lhs_flags);
+ }
break;
case COND_EXPR:
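[Editorial aside: a hand-worked illustration of the LHS classification above (not compiler output); whether a variable receives virtual operands at all additionally depends on it not being a GIMPLE register.]

    struct pair { int x, y; };

    void
    example (struct pair *p, struct pair q, int i)
    {
      struct pair s;
      int b[4];

      s = q;        /* whole object stored: opf_kill_def, V_MUST_DEF <s>  */
      s.x = i;      /* COMPONENT_REF, partial store: V_MAY_DEF <s>        */
      b[i] = i;     /* ARRAY_REF, partial store: V_MAY_DEF <b>            */
      *p = s;       /* INDIRECT_REF: kill flag stripped below, V_MAY_DEF  */
      p->y = b[i];  /* COMPONENT_REF over a pointer: likewise not killing */
    }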
get_expr_operands (tree stmt, tree *expr_p, int flags)
{
enum tree_code code;
- char class;
+ enum tree_code_class class;
tree expr = *expr_p;
+ stmt_ann_t s_ann = stmt_ann (stmt);
if (expr == NULL || expr == error_mark_node)
return;
/* Taking the address of a variable does not represent a
reference to it, but the fact that the stmt takes its address will be
of interest to some passes (e.g. alias resolution). */
- add_stmt_operand (expr_p, stmt, 0);
+ add_stmt_operand (expr_p, s_ann, 0);
/* If the address is invariant, there may be no interesting variable
references inside. */
case VAR_DECL:
case PARM_DECL:
case RESULT_DECL:
+ case CONST_DECL:
/* If we found a variable, add it to DEFS or USES depending
on the operand flags. */
- add_stmt_operand (expr_p, stmt, flags);
+ add_stmt_operand (expr_p, s_ann, flags);
return;
+ case MISALIGNED_INDIRECT_REF:
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
+ /* fall through */
+
+ case ALIGN_INDIRECT_REF:
case INDIRECT_REF:
get_indirect_ref_operands (stmt, expr, flags);
return;
according to the value of IS_DEF. Recurse if the LHS of the
ARRAY_REF node is not a regular variable. */
if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
- add_stmt_operand (expr_p, stmt, flags);
+ add_stmt_operand (expr_p, s_ann, flags);
else
get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
/* If the LHS of the compound reference is not a regular variable,
recurse to keep looking for more operands in the subexpression. */
if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
- add_stmt_operand (expr_p, stmt, flags);
+ add_stmt_operand (expr_p, s_ann, flags);
else
get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
return;
case COND_EXPR:
- get_expr_operands (stmt, &COND_EXPR_COND (expr), opf_none);
+ case VEC_COND_EXPR:
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
return;
return;
}
+ case REALIGN_LOAD_EXPR:
+ {
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
+ return;
+ }
+
case BLOCK:
case FUNCTION_DECL:
case EXC_PTR_EXPR:
return;
default:
- if (class == '1')
+ if (class == tcc_unary)
goto do_unary;
- if (class == '2' || class == '<')
+ if (class == tcc_binary || class == tcc_comparison)
goto do_binary;
- if (class == 'c' || class == 't')
+ if (class == tcc_constant || class == tcc_type)
return;
}
for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
{
- size_t i;
+ unsigned i;
+ bitmap_iterator bi;
/* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
decided to group them). */
if (global_var)
- add_stmt_operand (&global_var, stmt, opf_is_def);
+ add_stmt_operand (&global_var, s_ann, opf_is_def);
else
- EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
+ EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
{
tree var = referenced_var (i);
- add_stmt_operand (&var, stmt, opf_is_def);
- });
+ add_stmt_operand (&var, s_ann, opf_is_def);
+ }
/* Now clobber all addressables. */
- EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i,
+ EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
{
tree var = referenced_var (i);
- add_stmt_operand (&var, stmt, opf_is_def);
- });
+ add_stmt_operand (&var, s_ann, opf_is_def);
+ }
break;
}
}
-/* A subroutine of get_expr_operands to handle INDIRECT_REF. */
+/* A subroutine of get_expr_operands to handle INDIRECT_REF,
+ ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF. */
static void
get_indirect_ref_operands (tree stmt, tree expr, int flags)
{
tree *pptr = &TREE_OPERAND (expr, 0);
tree ptr = *pptr;
- stmt_ann_t ann = stmt_ann (stmt);
+ stmt_ann_t s_ann = stmt_ann (stmt);
/* Stores into INDIRECT_REF operands are never killing definitions. */
flags &= ~opf_kill_def;
- if (REF_ORIGINAL (expr))
- {
- enum tree_code ocode = TREE_CODE (REF_ORIGINAL (expr));
-
- /* If we originally accessed part of a structure, we do it still. */
- if (ocode == ARRAY_REF
- || ocode == COMPONENT_REF
- || ocode == REALPART_EXPR
- || ocode == IMAGPART_EXPR)
- flags &= ~opf_kill_def;
- }
-
if (SSA_VAR_P (ptr))
{
struct ptr_info_def *pi = NULL;
&& pi->name_mem_tag)
{
/* PTR has its own memory tag. Use it. */
- add_stmt_operand (&pi->name_mem_tag, stmt, flags);
+ add_stmt_operand (&pi->name_mem_tag, s_ann, flags);
}
else
{
/* If PTR is not an SSA_NAME or it doesn't have a name
tag, use its type memory tag. */
- var_ann_t ann;
+ var_ann_t v_ann;
/* If we are emitting debugging dumps, display a warning if
PTR is an SSA_NAME with no flow-sensitive alias
if (TREE_CODE (ptr) == SSA_NAME)
ptr = SSA_NAME_VAR (ptr);
- ann = var_ann (ptr);
- if (ann->type_mem_tag)
- add_stmt_operand (&ann->type_mem_tag, stmt, flags);
+ v_ann = var_ann (ptr);
+ if (v_ann->type_mem_tag)
+ add_stmt_operand (&v_ann->type_mem_tag, s_ann, flags);
}
}
optimizations from messing things up. */
else if (TREE_CODE (ptr) == INTEGER_CST)
{
- if (ann)
- ann->has_volatile_ops = true;
+ if (s_ann)
+ s_ann->has_volatile_ops = true;
return;
}
{
/* Make sure we know the object is addressable. */
pptr = &TREE_OPERAND (ptr, 0);
- add_stmt_operand (pptr, stmt, 0);
+ add_stmt_operand (pptr, s_ann, 0);
/* Mark the object itself with a VUSE. */
pptr = &TREE_OPERAND (*pptr, 0);
{
tree op;
int call_flags = call_expr_flags (expr);
- tree callee = get_callee_fndecl (expr);
-
- /* Find uses in the called function. */
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
-
- for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
- get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
-
- if (bitmap_first_set_bit (call_clobbered_vars) >= 0)
+ if (!bitmap_empty_p (call_clobbered_vars))
{
/* A 'pure' or a 'const' function never call-clobbers anything.
A 'noreturn' function might, but since we don't return anyway
there is no point in recording that. */
if (TREE_SIDE_EFFECTS (expr)
&& !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
- add_call_clobber_ops (stmt, callee);
+ add_call_clobber_ops (stmt);
else if (!(call_flags & ECF_CONST))
- add_call_read_ops (stmt, callee);
+ add_call_read_ops (stmt);
}
+
+ /* Find uses in the called function. */
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
+
+ for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
+ get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
+
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
}
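[Editorial aside: the ECF_PURE / ECF_CONST flags tested above derive in part from function attributes. A hand-worked example of which helper each call reaches, assuming call_clobbered_vars is non-empty:]

    extern void plain_fn (void);
    extern int pure_fn (const int *) __attribute__ ((pure));
    extern int const_fn (int) __attribute__ ((const));

    int
    classify (const int *p)
    {
      plain_fn ();                        /* side effects, no ECF flags:
                                             add_call_clobber_ops          */
      return pure_fn (p) + const_fn (1);  /* pure: add_call_read_ops, VUSEs;
                                             const: no virtual operands    */
    }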
operands. */
static void
-add_stmt_operand (tree *var_p, tree stmt, int flags)
+add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
{
bool is_real_op;
tree var, sym;
- stmt_ann_t s_ann = stmt_ann (stmt);
var_ann_t v_ann;
var = *var_p;
sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
v_ann = var_ann (sym);
- /* Don't expose volatile variables to the optimizers. */
- if (TREE_THIS_VOLATILE (sym))
- {
- if (s_ann)
- s_ann->has_volatile_ops = true;
- return;
- }
+ /* Mark statements with volatile operands. Optimizers should back
+ off from statements having volatile operands. */
+ if (TREE_THIS_VOLATILE (sym) && s_ann)
+ s_ann->has_volatile_ops = true;
if (is_real_op)
{
clobbered variables in the function. */
static void
-add_call_clobber_ops (tree stmt, tree callee)
+add_call_clobber_ops (tree stmt)
{
+ unsigned i;
+ tree t;
+ bitmap_iterator bi;
+ stmt_ann_t s_ann = stmt_ann (stmt);
+ struct stmt_ann_d empty_ann;
+
/* Functions that are not const, pure, or noreturn may clobber
call-clobbered variables. */
- if (stmt_ann (stmt))
- stmt_ann (stmt)->makes_clobbering_call = true;
+ if (s_ann)
+ s_ann->makes_clobbering_call = true;
- /* If we had created .GLOBAL_VAR earlier, use it. Otherwise, add
- a V_MAY_DEF operand for every call clobbered variable. See
- compute_may_aliases for the heuristic used to decide whether
- to create .GLOBAL_VAR or not. */
+ /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
+ for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
if (global_var)
- add_stmt_operand (&global_var, stmt, opf_is_def);
- else
{
- size_t i;
-
- /* Get info for module level statics. There is a bit set for
- each static if the call being processed does not read or
- write that variable. */
+ add_stmt_operand (&global_var, s_ann, opf_is_def);
+ return;
+ }
- bitmap not_read_b = callee
- ? get_global_statics_not_read (callee) : NULL;
- bitmap not_written_b = callee
- ? get_global_statics_not_written (callee) : NULL;
+ /* If the cache is valid, copy the elements into the build vectors. */
+ if (ssa_call_clobbered_cache_valid)
+ {
+ for (i = 0; i < VARRAY_ACTIVE_SIZE (clobbered_vuses); i++)
+ {
+ t = VARRAY_TREE (clobbered_vuses, i);
+ gcc_assert (TREE_CODE (t) != SSA_NAME);
+ var_ann (t)->in_vuse_list = 1;
+ VARRAY_PUSH_TREE (build_vuses, t);
+ }
+ for (i = 0; i < VARRAY_ACTIVE_SIZE (clobbered_v_may_defs); i++)
+ {
+ t = VARRAY_TREE (clobbered_v_may_defs, i);
+ gcc_assert (TREE_CODE (t) != SSA_NAME);
+ var_ann (t)->in_v_may_def_list = 1;
+ VARRAY_PUSH_TREE (build_v_may_defs, t);
+ }
+ if (s_ann)
+ {
+ s_ann->makes_aliased_loads = clobbered_aliased_loads;
+ s_ann->makes_aliased_stores = clobbered_aliased_stores;
+ }
+ return;
+ }
+ memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
- EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
- {
- tree var = referenced_var (i);
+ /* Add a V_MAY_DEF operand for every call clobbered variable. */
+ EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
+ {
+ tree var = referenced_var (i);
+ if (TREE_READONLY (var)
+ && (TREE_STATIC (var) || DECL_EXTERNAL (var)))
+ add_stmt_operand (&var, &empty_ann, opf_none);
+ else
+ add_stmt_operand (&var, &empty_ann, opf_is_def);
+ }
- bool not_read = not_read_b
- ? bitmap_bit_p(not_read_b, i) : false;
- bool not_written = not_written_b
- ? bitmap_bit_p(not_written_b, i) : false;
+ clobbered_aliased_loads = empty_ann.makes_aliased_loads;
+ clobbered_aliased_stores = empty_ann.makes_aliased_stores;
+ /* Set the flags for a stmt's annotation. */
+ if (s_ann)
+ {
+ s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
+ s_ann->makes_aliased_stores = empty_ann.makes_aliased_stores;
+ }
- if (not_read)
- { /* The var is not read during the call. */
- if (not_written)
- {
- /* Nothing. */
- }
- else
- add_stmt_operand (&var, stmt, opf_is_def);
- }
- else
- { /* The var is read during the call. */
- if (not_written)
- add_stmt_operand (&var, stmt, opf_none);
- else
- /* The not_read and not_written bits are only set
- for module static variables. Neither is set
- here, so we may be dealing with a module static
- or we may not. So we still must look anywhere
- else we can (such as the TREE_READONLY) to get
- better info. */
- /* If VAR is read-only, don't add a V_MAY_DEF, just a
- VUSE operand. */
- if (TREE_READONLY (var))
- add_stmt_operand (&var, stmt, opf_none);
- else
- add_stmt_operand (&var, stmt, opf_is_def);
- }
- });
+ /* Prepare empty cache vectors. */
+ if (clobbered_v_may_defs)
+ {
+ VARRAY_POP_ALL (clobbered_vuses);
+ VARRAY_POP_ALL (clobbered_v_may_defs);
+ }
+ else
+ {
+ VARRAY_TREE_INIT (clobbered_v_may_defs, 10, "clobbered_v_may_defs");
+ VARRAY_TREE_INIT (clobbered_vuses, 10, "clobbered_vuses");
}
+
+ /* Now fill the clobbered cache with the values that have been found. */
+ for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
+ VARRAY_PUSH_TREE (clobbered_vuses, VARRAY_TREE (build_vuses, i));
+ for (i = 0; i < VARRAY_ACTIVE_SIZE (build_v_may_defs); i++)
+ VARRAY_PUSH_TREE (clobbered_v_may_defs, VARRAY_TREE (build_v_may_defs, i));
+
+ ssa_call_clobbered_cache_valid = true;
}
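[Editorial aside: a consequence of the TREE_READONLY test above, worked by hand on a small translation unit (assuming both globals end up in call_clobbered_vars): a read-only static object cannot be written by the callee, so it is added with opf_none and becomes a VUSE instead of a V_MAY_DEF.]

    const int limit = 100;  /* TREE_READONLY && TREE_STATIC: VUSE only    */
    int counter;            /* ordinary call clobbered global: V_MAY_DEF  */

    extern void some_call (void);

    int
    observe (void)
    {
      some_call ();         /* VUSE <limit>; V_MAY_DEF <counter>          */
      return limit + counter;
    }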
function. */
static void
-add_call_read_ops (tree stmt, tree callee)
+add_call_read_ops (tree stmt)
{
- /* Otherwise, if the function is not pure, it may reference memory. Add
- a VUSE for .GLOBAL_VAR if it has been created. Otherwise, add a VUSE
- for each call-clobbered variable. See add_referenced_var for the
- heuristic used to decide whether to create .GLOBAL_VAR. */
+ unsigned i;
+ tree t;
+ bitmap_iterator bi;
+ stmt_ann_t s_ann = stmt_ann (stmt);
+ struct stmt_ann_d empty_ann;
+
+ /* If the function is not pure, it may reference memory. Add a VUSE
+ for .GLOBAL_VAR if it has been created. See add_referenced_var
+ for the heuristic used to decide whether to create .GLOBAL_VAR. */
if (global_var)
- add_stmt_operand (&global_var, stmt, opf_none);
- else
{
- size_t i;
- bitmap not_read_b = callee
- ? get_global_statics_not_read (callee) : NULL;
-
- EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
+ add_stmt_operand (&global_var, s_ann, opf_none);
+ return;
+ }
+
+ /* If the cache is valid, copy the elements into the build vector. */
+ if (ssa_ro_call_cache_valid)
+ {
+ for (i = 0; i < VARRAY_ACTIVE_SIZE (ro_call_vuses); i++)
{
- tree var = referenced_var (i);
- bool not_read = not_read_b
- ? bitmap_bit_p(not_read_b, i) : false;
- if (!not_read)
- add_stmt_operand (&var, stmt, opf_none);
- });
+ t = VARRAY_TREE (ro_call_vuses, i);
+ gcc_assert (TREE_CODE (t) != SSA_NAME);
+ var_ann (t)->in_vuse_list = 1;
+ VARRAY_PUSH_TREE (build_vuses, t);
+ }
+ if (s_ann)
+ s_ann->makes_aliased_loads = ro_call_aliased_loads;
+ return;
}
+
+ memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
+
+ /* Add a VUSE for each call-clobbered variable. */
+ EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
+ {
+ tree var = referenced_var (i);
+ add_stmt_operand (&var, &empty_ann, opf_none);
+ }
+
+ ro_call_aliased_loads = empty_ann.makes_aliased_loads;
+ if (s_ann)
+ s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
+
+ /* Prepare an empty cache vector. */
+ if (ro_call_vuses)
+ VARRAY_POP_ALL (ro_call_vuses);
+ else
+ VARRAY_TREE_INIT (ro_call_vuses, 10, "ro_call_vuses");
+
+ /* Now fill the read-only call cache with the values that have been
+ found. */
+ for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
+ VARRAY_PUSH_TREE (ro_call_vuses, VARRAY_TREE (build_vuses, i));
+
+ ssa_ro_call_cache_valid = true;
}
/* Copies virtual operands from SRC to DST. */
{
*v_must_defs_new = allocate_v_must_def_optype (NUM_V_MUST_DEFS (v_must_defs));
for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
- SET_V_MUST_DEF_OP (*v_must_defs_new, i, V_MUST_DEF_OP (v_must_defs, i));
+ {
+ SET_V_MUST_DEF_RESULT (*v_must_defs_new, i,
+ V_MUST_DEF_RESULT (v_must_defs, i));
+ SET_V_MUST_DEF_KILL (*v_must_defs_new, i, V_MUST_DEF_KILL (v_must_defs, i));
+ }
}
}
free_vuses (&(ann->operands.vuse_ops));
free_v_may_defs (&(ann->operands.v_may_def_ops));
free_v_must_defs (&(ann->operands.v_must_def_ops));
-
+
/* For each VDEF on the original statement, we want to create a
VUSE of the V_MAY_DEF result or V_MUST_DEF result on the new
statement. */
for (j = 0; j < NUM_V_MUST_DEFS (old_ops->v_must_def_ops); j++)
{
- op = V_MUST_DEF_OP (old_ops->v_must_def_ops, j);
+ op = V_MUST_DEF_RESULT (old_ops->v_must_def_ops, j);
append_vuse (op);
}