/* SSA operands management for trees.
- Copyright (C) 2003 Free Software Foundation, Inc.
+ Copyright (C) 2003, 2004 Free Software Foundation, Inc.
This file is part of GCC.
#include "flags.h"
#include "function.h"
#include "diagnostic.h"
+#include "errors.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
+#include "langhooks.h"
-/* This file contains the code required to mnage the operands cache of the
+/* This file contains the code required to manage the operands cache of the
SSA optimizer. For every stmt, we maintain an operand cache in the stmt
- annotation. This cache contains operands that will be of interets to
+ annotation. This cache contains operands that will be of interest to
optimizers and other passes wishing to manipulate the IL.
The operand types are broken up into REAL and VIRTUAL operands. The real
get_stmt_operands() is the primary entry point.
The operand tree is then parsed by the various get_* routines, which look
- through the stmt tree for the occurence of operands which may be of
+ through the stmt tree for the occurrence of operands which may be of
interest, and calls are made to the append_* routines whenever one is
found. There are 5 of these routines, each representing one of the
5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
variable, and that same variable occurs in the same operands cache, then
the new cache vector will also get the same SSA_NAME.
- ie, if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
+ i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
vector for VUSE, then the new vector will also be modified such that
it contains 'a_5' rather than 'a'.
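
   As a minimal sketch (toy types, not the GCC internals), that
   correlation step amounts to:

	struct toy_op { void *base; int version; };

	static void
	preserve_versions (struct toy_op *new_ops, int n_new,
			   const struct toy_op *old_ops, int n_old)
	{
	  int i, j;

	  for (i = 0; i < n_new; i++)
	    for (j = 0; j < n_old; j++)
	      if (old_ops[j].version != 0
		  && new_ops[i].base == old_ops[j].base)
		{
		  new_ops[i].version = old_ops[j].version;
		  break;
		}
	}

   where 'base' stands in for SSA_NAME_VAR, 'version' for the SSA
   version number (0 meaning none assigned yet), and base pointers
   compare equal exactly when the underlying variable is the same,
   as tree nodes do.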
/* Array for building all the v_must_def operands. */
static GTY (()) varray_type build_v_must_defs;
+/* True if the operands for call clobbered vars are cached and valid.  */
+bool ssa_call_clobbered_cache_valid;
+
+/* Likewise for the cached VUSE vector of pure/read-only calls.  */
+bool ssa_ro_call_cache_valid;
+
+/* These arrays are the cached operand vectors for call clobbering and
+   read-only calls.  */
+static GTY (()) varray_type clobbered_v_may_defs;
+static GTY (()) varray_type clobbered_vuses;
+static GTY (()) varray_type ro_call_vuses;
+static bool clobbered_aliased_loads;
+static bool clobbered_aliased_stores;
+static bool ro_call_aliased_loads;
+
#ifdef ENABLE_CHECKING
/* Used to make sure operand construction is working on the proper stmt. */
tree check_build_stmt;
#endif
+def_operand_p NULL_DEF_OPERAND_P = { NULL };
+use_operand_p NULL_USE_OPERAND_P = { NULL };
+
static void note_addressable (tree, stmt_ann_t);
static void get_expr_operands (tree, tree *, int);
static void get_asm_expr_operands (tree);
static void append_v_must_def (tree);
static void add_call_clobber_ops (tree);
static void add_call_read_ops (tree);
-static void add_stmt_operand (tree *, tree, int);
+static void add_stmt_operand (tree *, stmt_ann_t, int);
/* Return a vector of contiguous memory for NUM def operands. */
v_may_def_optype v_may_def_ops;
unsigned size;
size = sizeof (struct v_may_def_optype_d)
- + sizeof (v_may_def_operand_type_t) * (num - 1);
+ + sizeof (v_def_use_operand_type_t) * (num - 1);
v_may_def_ops = ggc_alloc (size);
v_may_def_ops->num_v_may_defs = num;
return v_may_def_ops;
{
v_must_def_optype v_must_def_ops;
unsigned size;
- size = sizeof (struct v_must_def_optype_d) + sizeof (tree) * (num - 1);
+ size = sizeof (struct v_must_def_optype_d)
+ + sizeof (v_def_use_operand_type_t) * (num - 1);
v_must_def_ops = ggc_alloc (size);
v_must_def_ops->num_v_must_defs = num;
return v_must_def_ops;
void
fini_ssa_operands (void)
{
+ ggc_free (build_defs);
+ ggc_free (build_uses);
+ ggc_free (build_v_may_defs);
+ ggc_free (build_vuses);
+ ggc_free (build_v_must_defs);
+ build_defs = NULL;
+ build_uses = NULL;
+ build_v_may_defs = NULL;
+ build_vuses = NULL;
+ build_v_must_defs = NULL;
+ if (clobbered_v_may_defs)
+ {
+ ggc_free (clobbered_v_may_defs);
+ ggc_free (clobbered_vuses);
+ clobbered_v_may_defs = NULL;
+ clobbered_vuses = NULL;
+ }
+ if (ro_call_vuses)
+ {
+ ggc_free (ro_call_vuses);
+ ro_call_vuses = NULL;
+ }
}
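
/* The operand allocators above rely on the classic trailing-array idiom:
   each optype struct declares a one-element array, so an allocation for
   NUM operands needs room for only NUM - 1 extra elements.  A
   self-contained sketch of the idiom with a toy element type (ggc_alloc
   as used above):  */

struct toy_vec_d { unsigned num; int elems[1]; };

static struct toy_vec_d *
alloc_toy_vec (unsigned num)
{
  struct toy_vec_d *v;

  /* The struct itself already provides elems[0]; add space for the
     remaining num - 1 elements.  */
  v = ggc_alloc (sizeof (struct toy_vec_d) + sizeof (int) * (num - 1));
  v->num = num;
  return v;
}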
if (num == 0)
return NULL;
-#ifdef ENABLE_CHECKING
/* There should only be a single real definition per assignment. */
- if (TREE_CODE (stmt) == MODIFY_EXPR && num > 1)
- abort ();
-#endif
+ gcc_assert (TREE_CODE (stmt) != MODIFY_EXPR || num <= 1);
old_ops = *old_ops_p;
initial call to get_stmt_operands does not pass a pointer to a
statement). */
for (x = 0; x < num; x++)
- if (*(VARRAY_TREE_PTR (build_uses, x)) == stmt)
- abort ();
+ gcc_assert (*(VARRAY_TREE_PTR (build_uses, x)) != stmt);
}
#endif
old_ops = *old_ops_p;
}
+/* Clear the in_list bits and empty the build array for v_may_defs. */
+
+static inline void
+cleanup_v_may_defs (void)
+{
+ unsigned x, num;
+ num = VARRAY_ACTIVE_SIZE (build_v_may_defs);
+
+ for (x = 0; x < num; x++)
+ {
+ tree t = VARRAY_TREE (build_v_may_defs, x);
+ var_ann_t ann = var_ann (t);
+ ann->in_v_may_def_list = 0;
+ }
+ VARRAY_POP_ALL (build_v_may_defs);
+}
+
/* Return a new vuse operand vector, comparing to OLD_OPS_P. */
static vuse_optype
num = VARRAY_ACTIVE_SIZE (build_vuses);
if (num == 0)
{
- VARRAY_POP_ALL (build_v_may_defs);
+ cleanup_v_may_defs ();
return NULL;
}
if (num_v_may_defs > 0)
{
- size_t i, j;
+ size_t i;
tree vuse;
for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
{
vuse = VARRAY_TREE (build_vuses, i);
- for (j = 0; j < num_v_may_defs; j++)
- {
- if (vuse == VARRAY_TREE (build_v_may_defs, j))
- break;
- }
-
- /* If we found a useless VUSE operand, remove it from the
- operand array by replacing it with the last active element
- in the operand array (unless the useless VUSE was the
- last operand, in which case we simply remove it. */
- if (j != num_v_may_defs)
+ if (TREE_CODE (vuse) != SSA_NAME)
{
- if (i != VARRAY_ACTIVE_SIZE (build_vuses) - 1)
- {
- VARRAY_TREE (build_vuses, i)
- = VARRAY_TREE (build_vuses,
- VARRAY_ACTIVE_SIZE (build_vuses) - 1);
+ var_ann_t ann = var_ann (vuse);
+ ann->in_vuse_list = 0;
+ if (ann->in_v_may_def_list)
+ {
+ /* If we found a useless VUSE operand, remove it from the
+ operand array by replacing it with the last active element
+ in the operand array (unless the useless VUSE was the
+ last operand, in which case we simply remove it). */
+ if (i != VARRAY_ACTIVE_SIZE (build_vuses) - 1)
+ {
+ VARRAY_TREE (build_vuses, i)
+ = VARRAY_TREE (build_vuses,
+ VARRAY_ACTIVE_SIZE (build_vuses) - 1);
+ }
+ VARRAY_POP (build_vuses);
+
+ /* We want to rescan the element at this index, unless
+ this was the last element, in which case the loop
+ terminates. */
+ i--;
}
- VARRAY_POP (build_vuses);
-
- /* We want to rescan the element at this index, unless
- this was the last element, in which case the loop
- terminates. */
- i--;
}
}
}
+ else
+ /* Clear out the in_list bits. */
+ for (x = 0; x < num; x++)
+ {
+ tree t = VARRAY_TREE (build_vuses, x);
+ if (TREE_CODE (t) != SSA_NAME)
+ {
+ var_ann_t ann = var_ann (t);
+ ann->in_vuse_list = 0;
+ }
+ }
+
num = VARRAY_ACTIVE_SIZE (build_vuses);
/* The pruning above could have reduced the size to zero, however. */
if (num == 0)
{
- VARRAY_POP_ALL (build_v_may_defs);
+ cleanup_v_may_defs ();
return NULL;
}
/* The v_may_def build vector wasn't freed because we needed it here.
Free it now with the vuses build vector. */
VARRAY_POP_ALL (build_vuses);
- VARRAY_POP_ALL (build_v_may_defs);
+ cleanup_v_may_defs ();
return vuse_ops;
}
-
/* Return a new v_must_def operand vector for STMT, comparing to OLD_OPS_P. */
static v_must_def_optype
if (num == 0)
return NULL;
-#ifdef ENABLE_CHECKING
/* There should only be a single V_MUST_DEF per assignment. */
- if (TREE_CODE (stmt) == MODIFY_EXPR && num > 1)
- abort ();
-#endif
+ gcc_assert (TREE_CODE (stmt) != MODIFY_EXPR || num <= 1);
old_ops = *old_ops_p;
build_diff = false;
for (x = 0; x < num; x++)
{
- tree var = old_ops->v_must_defs[x];
+ tree var = old_ops->v_must_defs[x].def;
if (TREE_CODE (var) == SSA_NAME)
var = SSA_NAME_VAR (var);
if (var != VARRAY_TREE (build_v_must_defs, x))
/* Look for VAR in the original vector. */
for (i = 0; i < old_num; i++)
{
- result = old_ops->v_must_defs[i];
+ result = old_ops->v_must_defs[i].def;
if (TREE_CODE (result) == SSA_NAME)
result = SSA_NAME_VAR (result);
if (result == var)
{
- v_must_def_ops->v_must_defs[x] = old_ops->v_must_defs[i];
+ v_must_def_ops->v_must_defs[x].def = old_ops->v_must_defs[i].def;
+ v_must_def_ops->v_must_defs[x].use = old_ops->v_must_defs[i].use;
break;
}
}
if (i == old_num)
- v_must_def_ops->v_must_defs[x] = var;
+ {
+ v_must_def_ops->v_must_defs[x].def = var;
+ v_must_def_ops->v_must_defs[x].use = var;
+ }
}
}
VARRAY_POP_ALL (build_v_must_defs);
static inline void
start_ssa_stmt_operands (void)
{
-#ifdef ENABLE_CHECKING
- if (VARRAY_ACTIVE_SIZE (build_defs) > 0
- || VARRAY_ACTIVE_SIZE (build_uses) > 0
- || VARRAY_ACTIVE_SIZE (build_vuses) > 0
- || VARRAY_ACTIVE_SIZE (build_v_may_defs) > 0
- || VARRAY_ACTIVE_SIZE (build_v_must_defs) > 0)
- abort ();
-#endif
+ gcc_assert (VARRAY_ACTIVE_SIZE (build_defs) == 0);
+ gcc_assert (VARRAY_ACTIVE_SIZE (build_uses) == 0);
+ gcc_assert (VARRAY_ACTIVE_SIZE (build_vuses) == 0);
+ gcc_assert (VARRAY_ACTIVE_SIZE (build_v_may_defs) == 0);
+ gcc_assert (VARRAY_ACTIVE_SIZE (build_v_must_defs) == 0);
}
static inline void
append_v_may_def (tree var)
{
- unsigned i;
+ var_ann_t ann = get_var_ann (var);
/* Don't allow duplicate entries. */
- for (i = 0; i < VARRAY_ACTIVE_SIZE (build_v_may_defs); i++)
- if (var == VARRAY_TREE (build_v_may_defs, i))
- return;
+ if (ann->in_v_may_def_list)
+ return;
+ ann->in_v_may_def_list = 1;
VARRAY_PUSH_TREE (build_v_may_defs, var);
}
static inline void
append_vuse (tree var)
{
- size_t i;
/* Don't allow duplicate entries. */
- for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
- if (var == VARRAY_TREE (build_vuses, i))
- return;
+ if (TREE_CODE (var) != SSA_NAME)
+ {
+ var_ann_t ann = get_var_ann (var);
+
+ if (ann->in_vuse_list || ann->in_v_may_def_list)
+ return;
+ ann->in_vuse_list = 1;
+ }
VARRAY_PUSH_TREE (build_vuses, var);
}
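
/* Both append routines above test a per-variable annotation bit instead
   of scanning the build array, making the duplicate check O(1) per
   operand rather than O(n).  A self-contained sketch of the pattern
   (toy annotation type, not GCC's var_ann):  */

struct toy_ann { unsigned in_list : 1; };

static void
append_once (struct toy_ann *ann, struct toy_ann **list, unsigned *n)
{
  /* O(1) duplicate test, replacing the old linear scan.  */
  if (ann->in_list)
    return;
  ann->in_list = 1;
  list[(*n)++] = ann;
}

static void
cleanup_list (struct toy_ann **list, unsigned *n)
{
  unsigned i;

  /* Clear the bits so the next stmt starts fresh, just as
     cleanup_v_may_defs does for build_v_may_defs.  */
  for (i = 0; i < *n; i++)
    list[i]->in_list = 0;
  *n = 0;
}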
will be destroyed. It is appropriate to call free_stmt_operands() on
the value returned in old_ops.
- The rationale for this: Certain optimizations wish to exmaine the difference
+ The rationale for this: Certain optimizations wish to examine the difference
between new_ops and old_ops after processing. If a set of operands doesn't change,
change, new_ops will simply assume the pointer in old_ops, and the old_ops
pointer will be set to NULL, indicating no memory needs to be cleared.
<* compare old_ops_copy and new_ops *>
free_ssa_operands (old_ops); */
-void
+static void
build_ssa_operands (tree stmt, stmt_ann_t ann, stmt_operands_p old_ops,
stmt_operands_p new_ops)
{
switch (code)
{
case MODIFY_EXPR:
- get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);
- if (TREE_CODE (TREE_OPERAND (stmt, 0)) == ARRAY_REF
- || TREE_CODE (TREE_OPERAND (stmt, 0)) == ARRAY_RANGE_REF
- || TREE_CODE (TREE_OPERAND (stmt, 0)) == COMPONENT_REF
- || TREE_CODE (TREE_OPERAND (stmt, 0)) == REALPART_EXPR
- || TREE_CODE (TREE_OPERAND (stmt, 0)) == IMAGPART_EXPR
- /* Use a V_MAY_DEF if the RHS might throw, as the LHS won't be
- modified in that case. FIXME we should represent somehow
- that it is killed on the fallthrough path. */
- || tree_could_throw_p (TREE_OPERAND (stmt, 1)))
- get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_is_def);
- else
- get_expr_operands (stmt, &TREE_OPERAND (stmt, 0),
- opf_is_def | opf_kill_def);
+ /* First get operands from the RHS. For the LHS, we use a V_MAY_DEF if
+ either only part of the LHS is modified or the RHS might throw;
+ otherwise, use a V_MUST_DEF.
+
+ ??? If it might throw, we should represent somehow that it is killed
+ on the fallthrough path. */
+ {
+ tree lhs = TREE_OPERAND (stmt, 0);
+ int lhs_flags = opf_is_def;
+
+ get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);
+
+ /* If the LHS is a VIEW_CONVERT_EXPR, it isn't changing whether
+ or not the entire LHS is modified; that depends on what's
+ inside the VIEW_CONVERT_EXPR. */
+ if (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
+ lhs = TREE_OPERAND (lhs, 0);
+
+ if (TREE_CODE (lhs) != ARRAY_REF && TREE_CODE (lhs) != ARRAY_RANGE_REF
+ && TREE_CODE (lhs) != COMPONENT_REF
+ && TREE_CODE (lhs) != BIT_FIELD_REF
+ && TREE_CODE (lhs) != REALPART_EXPR
+ && TREE_CODE (lhs) != IMAGPART_EXPR)
+ lhs_flags |= opf_kill_def;
+
+ get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), lhs_flags);
+ }
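+
+ /* For example, given an unaliased variable that is not a GIMPLE
+    register:
+
+      a = b + c;   overwrites all of 'a'   -> V_MUST_DEF <a>
+      s.f = x;     COMPONENT_REF, partial  -> V_MAY_DEF <s>
+      a[i] = x;    ARRAY_REF, partial      -> V_MAY_DEF <a>
+
+    A store whose RHS might throw likewise gets only a V_MAY_DEF,
+    per the comment above. */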
break;
case COND_EXPR:
stmt_ann_t ann;
stmt_operands_t old_operands;
-#if defined ENABLE_CHECKING
/* The optimizers cannot handle statements that are nothing but a
_DECL. This indicates a bug in the gimplifier. */
- if (SSA_VAR_P (stmt))
- abort ();
-#endif
+ gcc_assert (!SSA_VAR_P (stmt));
/* Ignore error statements. */
if (TREE_CODE (stmt) == ERROR_MARK)
get_expr_operands (tree stmt, tree *expr_p, int flags)
{
enum tree_code code;
- char class;
+ enum tree_code_class class;
tree expr = *expr_p;
+ stmt_ann_t s_ann = stmt_ann (stmt);
if (expr == NULL || expr == error_mark_node)
return;
/* Taking the address of a variable does not represent a
reference to it, but the fact that the stmt takes its address will be
of interest to some passes (e.g. alias resolution). */
- add_stmt_operand (expr_p, stmt, 0);
+ add_stmt_operand (expr_p, s_ann, 0);
/* If the address is invariant, there may be no interesting variable
references inside. */
case VAR_DECL:
case PARM_DECL:
case RESULT_DECL:
+ case CONST_DECL:
/* If we found a variable, add it to DEFS or USES depending
on the operand flags. */
- add_stmt_operand (expr_p, stmt, flags);
+ add_stmt_operand (expr_p, s_ann, flags);
return;
+ case MISALIGNED_INDIRECT_REF:
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
+ /* fall through */
+
+ case ALIGN_INDIRECT_REF:
case INDIRECT_REF:
get_indirect_ref_operands (stmt, expr, flags);
return;
according to the value of IS_DEF. Recurse if the LHS of the
ARRAY_REF node is not a regular variable. */
if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
- add_stmt_operand (expr_p, stmt, flags);
+ add_stmt_operand (expr_p, s_ann, flags);
else
get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
/* If the LHS of the compound reference is not a regular variable,
recurse to keep looking for more operands in the subexpression. */
if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
- add_stmt_operand (expr_p, stmt, flags);
+ add_stmt_operand (expr_p, s_ann, flags);
else
get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
get_call_expr_operands (stmt, expr);
return;
+ case COND_EXPR:
+ case VEC_COND_EXPR:
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
+ return;
+
case MODIFY_EXPR:
{
int subflags;
return;
}
+ case REALIGN_LOAD_EXPR:
+ {
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
+ return;
+ }
+
case BLOCK:
case FUNCTION_DECL:
case EXC_PTR_EXPR:
return;
default:
- if (class == '1')
+ if (class == tcc_unary)
goto do_unary;
- if (class == '2' || class == '<')
+ if (class == tcc_binary || class == tcc_comparison)
goto do_binary;
- if (class == 'c' || class == 't')
+ if (class == tcc_constant || class == tcc_type)
return;
}
/* If we get here, something has gone wrong. */
+#ifdef ENABLE_CHECKING
fprintf (stderr, "unhandled expression in get_expr_operands():\n");
debug_tree (expr);
fputs ("\n", stderr);
- abort ();
+ internal_error ("internal error");
+#endif
+ gcc_unreachable ();
}
-/* Scan operands in the ASM_EXPR stmt refered to in INFO. */
+/* Scan operands in the ASM_EXPR stmt referred to in INFO. */
static void
get_asm_expr_operands (tree stmt)
parse_output_constraint (&constraint, i, 0, 0,
&allows_mem, &allows_reg, &is_inout);
-#if defined ENABLE_CHECKING
/* This should have been split in gimplify_asm_expr. */
- if (allows_reg && is_inout)
- abort ();
-#endif
+ gcc_assert (!allows_reg || !is_inout);
/* Memory operands are addressable. Note that STMT needs the
address of this operand. */
for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
{
- size_t i;
+ unsigned i;
+ bitmap_iterator bi;
/* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
decided to group them). */
if (global_var)
- add_stmt_operand (&global_var, stmt, opf_is_def);
+ add_stmt_operand (&global_var, s_ann, opf_is_def);
else
- EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
+ EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
{
tree var = referenced_var (i);
- add_stmt_operand (&var, stmt, opf_is_def);
- });
+ add_stmt_operand (&var, s_ann, opf_is_def);
+ }
/* Now clobber all addressables. */
- EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i,
+ EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
{
tree var = referenced_var (i);
- add_stmt_operand (&var, stmt, opf_is_def);
- });
+ add_stmt_operand (&var, s_ann, opf_is_def);
+ }
break;
}
}
-/* A subroutine of get_expr_operands to handle INDIRECT_REF. */
+/* A subroutine of get_expr_operands to handle INDIRECT_REF,
+ ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF. */
static void
get_indirect_ref_operands (tree stmt, tree expr, int flags)
{
tree *pptr = &TREE_OPERAND (expr, 0);
tree ptr = *pptr;
- stmt_ann_t ann = stmt_ann (stmt);
+ stmt_ann_t s_ann = stmt_ann (stmt);
/* Stores into INDIRECT_REF operands are never killing definitions. */
flags &= ~opf_kill_def;
&& pi->name_mem_tag)
{
/* PTR has its own memory tag. Use it. */
- add_stmt_operand (&pi->name_mem_tag, stmt, flags);
+ add_stmt_operand (&pi->name_mem_tag, s_ann, flags);
}
else
{
/* If PTR is not an SSA_NAME or it doesn't have a name
tag, use its type memory tag. */
- var_ann_t ann;
+ var_ann_t v_ann;
/* If we are emitting debugging dumps, display a warning if
PTR is an SSA_NAME with no flow-sensitive alias
if (TREE_CODE (ptr) == SSA_NAME)
ptr = SSA_NAME_VAR (ptr);
- ann = var_ann (ptr);
- if (ann->type_mem_tag)
- add_stmt_operand (&ann->type_mem_tag, stmt, flags);
+ v_ann = var_ann (ptr);
+ if (v_ann->type_mem_tag)
+ add_stmt_operand (&v_ann->type_mem_tag, s_ann, flags);
}
}
optimizations from messing things up. */
else if (TREE_CODE (ptr) == INTEGER_CST)
{
- if (ann)
- ann->has_volatile_ops = true;
+ if (s_ann)
+ s_ann->has_volatile_ops = true;
return;
}
{
/* Make sure we know the object is addressable. */
pptr = &TREE_OPERAND (ptr, 0);
- add_stmt_operand (pptr, stmt, 0);
+ add_stmt_operand (pptr, s_ann, 0);
/* Mark the object itself with a VUSE. */
pptr = &TREE_OPERAND (*pptr, 0);
/* Ok, this isn't even is_gimple_min_invariant. Something's broken. */
else
- abort ();
+ gcc_unreachable ();
/* Add a USE operand for the base pointer. */
get_expr_operands (stmt, pptr, opf_none);
tree op;
int call_flags = call_expr_flags (expr);
- /* Find uses in the called function. */
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
-
- for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
- get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
-
- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
-
- if (bitmap_first_set_bit (call_clobbered_vars) >= 0)
+ if (!bitmap_empty_p (call_clobbered_vars))
{
/* A 'pure' or a 'const' function never call-clobbers anything.
A 'noreturn' function might, but since we don't return anyway
if (TREE_SIDE_EFFECTS (expr)
&& !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
add_call_clobber_ops (stmt);
- else if (!(call_flags & (ECF_CONST | ECF_NORETURN)))
+ else if (!(call_flags & ECF_CONST))
add_call_read_ops (stmt);
}
+
+ /* Find uses in the called function. */
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
+
+ for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
+ get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
+
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
}
operands. */
static void
-add_stmt_operand (tree *var_p, tree stmt, int flags)
+add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
{
bool is_real_op;
tree var, sym;
- stmt_ann_t s_ann = stmt_ann (stmt);
var_ann_t v_ann;
var = *var_p;
/* The variable is not aliased or it is an alias tag. */
if (flags & opf_is_def)
{
- if (v_ann->is_alias_tag)
- {
- /* Alias tagged vars get V_MAY_DEF to avoid breaking
- def-def chains with the other variables in their
- alias sets. */
- if (s_ann)
- s_ann->makes_aliased_stores = 1;
- append_v_may_def (var);
- }
- else if (flags & opf_kill_def)
+ if (flags & opf_kill_def)
{
-#if defined ENABLE_CHECKING
/* Only regular variables may get a V_MUST_DEF
operand. */
- if (v_ann->mem_tag_kind != NOT_A_TAG)
- abort ();
-#endif
+ gcc_assert (v_ann->mem_tag_kind == NOT_A_TAG);
/* V_MUST_DEF for non-aliased, non-GIMPLE register
variable definitions. */
append_v_must_def (var);
/* The variable is aliased. Add its aliases to the virtual
operands. */
-#if defined ENABLE_CHECKING
- if (VARRAY_ACTIVE_SIZE (aliases) == 0)
- abort ();
-#endif
+ gcc_assert (VARRAY_ACTIVE_SIZE (aliases) != 0);
if (flags & opf_is_def)
{
static void
add_call_clobber_ops (tree stmt)
{
+ unsigned i;
+ tree t;
+ bitmap_iterator bi;
+ stmt_ann_t s_ann = stmt_ann (stmt);
+ struct stmt_ann_d empty_ann;
+
/* Functions that are not const, pure, or noreturn may clobber
call-clobbered variables. */
- if (stmt_ann (stmt))
- stmt_ann (stmt)->makes_clobbering_call = true;
+ if (s_ann)
+ s_ann->makes_clobbering_call = true;
- /* If we had created .GLOBAL_VAR earlier, use it. Otherwise, add
- a V_MAY_DEF operand for every call clobbered variable. See
- compute_may_aliases for the heuristic used to decide whether
- to create .GLOBAL_VAR or not. */
+ /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
+ for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
if (global_var)
- add_stmt_operand (&global_var, stmt, opf_is_def);
- else
{
- size_t i;
+ add_stmt_operand (&global_var, s_ann, opf_is_def);
+ return;
+ }
- EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
+ /* If the cache is valid, copy the elements into the build vectors. */
+ if (ssa_call_clobbered_cache_valid)
+ {
+ for (i = 0; i < VARRAY_ACTIVE_SIZE (clobbered_vuses); i++)
+ {
+ t = VARRAY_TREE (clobbered_vuses, i);
+ gcc_assert (TREE_CODE (t) != SSA_NAME);
+ var_ann (t)->in_vuse_list = 1;
+ VARRAY_PUSH_TREE (build_vuses, t);
+ }
+ for (i = 0; i < VARRAY_ACTIVE_SIZE (clobbered_v_may_defs); i++)
{
- tree var = referenced_var (i);
+ t = VARRAY_TREE (clobbered_v_may_defs, i);
+ gcc_assert (TREE_CODE (t) != SSA_NAME);
+ var_ann (t)->in_v_may_def_list = 1;
+ VARRAY_PUSH_TREE (build_v_may_defs, t);
+ }
+ if (s_ann)
+ {
+ s_ann->makes_aliased_loads = clobbered_aliased_loads;
+ s_ann->makes_aliased_stores = clobbered_aliased_stores;
+ }
+ return;
+ }
- /* If VAR is read-only, don't add a V_MAY_DEF, just a
- VUSE operand. */
- if (!TREE_READONLY (var))
- add_stmt_operand (&var, stmt, opf_is_def);
- else
- add_stmt_operand (&var, stmt, opf_none);
- });
+ memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
+
+ /* Add a V_MAY_DEF operand for every call clobbered variable. */
+ EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
+ {
+ tree var = referenced_var (i);
+ if (TREE_READONLY (var)
+ && (TREE_STATIC (var) || DECL_EXTERNAL (var)))
+ add_stmt_operand (&var, &empty_ann, opf_none);
+ else
+ add_stmt_operand (&var, &empty_ann, opf_is_def);
}
+
+ clobbered_aliased_loads = empty_ann.makes_aliased_loads;
+ clobbered_aliased_stores = empty_ann.makes_aliased_stores;
+
+ /* Set the flags for a stmt's annotation. */
+ if (s_ann)
+ {
+ s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
+ s_ann->makes_aliased_stores = empty_ann.makes_aliased_stores;
+ }
+
+ /* Prepare empty cache vectors. */
+ if (clobbered_v_may_defs)
+ {
+ VARRAY_POP_ALL (clobbered_vuses);
+ VARRAY_POP_ALL (clobbered_v_may_defs);
+ }
+ else
+ {
+ VARRAY_TREE_INIT (clobbered_v_may_defs, 10, "clobbered_v_may_defs");
+ VARRAY_TREE_INIT (clobbered_vuses, 10, "clobbered_vuses");
+ }
+
+ /* Now fill the clobbered cache with the values that have been found. */
+ for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
+ VARRAY_PUSH_TREE (clobbered_vuses, VARRAY_TREE (build_vuses, i));
+ for (i = 0; i < VARRAY_ACTIVE_SIZE (build_v_may_defs); i++)
+ VARRAY_PUSH_TREE (clobbered_v_may_defs, VARRAY_TREE (build_v_may_defs, i));
+
+ ssa_call_clobbered_cache_valid = true;
}
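
/* add_call_clobber_ops above (and add_call_read_ops below) memoize their
   operand vectors: the first call computes them, and later calls replay
   the cache until the cache-valid flag is cleared elsewhere, when
   aliasing information changes (the invalidation is not shown in this
   patch).  A stripped-down sketch of that shape, with toy types and the
   expensive walk abstracted behind a caller-supplied function:  */

static int cache_ops[64];
static unsigned cache_n;
static bool cache_valid;

static void
add_ops_cached (int *build, unsigned *build_n,
		void (*compute) (int *, unsigned *))
{
  unsigned i;

  if (cache_valid)
    {
      /* Fast path: replay the cached operands into the build vector.  */
      for (i = 0; i < cache_n; i++)
	build[(*build_n)++] = cache_ops[i];
      return;
    }

  /* Slow path: compute the operands, then refill the cache from the
     result so the next call takes the fast path.  */
  compute (build, build_n);
  for (cache_n = 0; cache_n < *build_n; cache_n++)
    cache_ops[cache_n] = build[cache_n];
  cache_valid = true;
}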
static void
add_call_read_ops (tree stmt)
{
- /* Otherwise, if the function is not pure, it may reference memory. Add
- a VUSE for .GLOBAL_VAR if it has been created. Otherwise, add a VUSE
- for each call-clobbered variable. See add_referenced_var for the
- heuristic used to decide whether to create .GLOBAL_VAR. */
+ unsigned i;
+ tree t;
+ bitmap_iterator bi;
+ stmt_ann_t s_ann = stmt_ann (stmt);
+ struct stmt_ann_d empty_ann;
+
+ /* If the function is not pure, it may reference memory. Add
+ a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
+ for the heuristic used to decide whether to create .GLOBAL_VAR. */
if (global_var)
- add_stmt_operand (&global_var, stmt, opf_none);
- else
{
- size_t i;
-
- EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
+ add_stmt_operand (&global_var, s_ann, opf_none);
+ return;
+ }
+
+ /* If the cache is valid, copy the elements into the build vector. */
+ if (ssa_ro_call_cache_valid)
+ {
+ for (i = 0; i < VARRAY_ACTIVE_SIZE (ro_call_vuses); i++)
{
- tree var = referenced_var (i);
- add_stmt_operand (&var, stmt, opf_none);
- });
+ t = VARRAY_TREE (ro_call_vuses, i);
+ gcc_assert (TREE_CODE (t) != SSA_NAME);
+ var_ann (t)->in_vuse_list = 1;
+ VARRAY_PUSH_TREE (build_vuses, t);
+ }
+ if (s_ann)
+ s_ann->makes_aliased_loads = ro_call_aliased_loads;
+ return;
+ }
+
+ memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
+
+ /* Add a VUSE for each call-clobbered variable. */
+ EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
+ {
+ tree var = referenced_var (i);
+ add_stmt_operand (&var, &empty_ann, opf_none);
}
+
+ ro_call_aliased_loads = empty_ann.makes_aliased_loads;
+ if (s_ann)
+ s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
+
+ /* Prepare empty cache vectors. */
+ if (ro_call_vuses)
+ VARRAY_POP_ALL (ro_call_vuses);
+ else
+ VARRAY_TREE_INIT (ro_call_vuses, 10, "ro_call_vuses");
+
+ /* Now fill the read-only call cache with the values that have been found. */
+ for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
+ VARRAY_PUSH_TREE (ro_call_vuses, VARRAY_TREE (build_vuses, i));
+
+ ssa_ro_call_cache_valid = true;
}
/* Copies virtual operands from SRC to DST. */
{
*v_must_defs_new = allocate_v_must_def_optype (NUM_V_MUST_DEFS (v_must_defs));
for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
- SET_V_MUST_DEF_OP (*v_must_defs_new, i, V_MUST_DEF_OP (v_must_defs, i));
+ {
+ SET_V_MUST_DEF_RESULT (*v_must_defs_new, i, V_MUST_DEF_RESULT (v_must_defs, i));
+ SET_V_MUST_DEF_KILL (*v_must_defs_new, i, V_MUST_DEF_KILL (v_must_defs, i));
+ }
}
}
/* Specifically for use in DOM's expression analysis. Given a store, we
- create an artifical stmt which looks like a load from the store, this can
+ create an artificial stmt which looks like a load from the store; this can
be used to eliminate redundant loads. OLD_OPS are the operands from the
- store stmt, and NEW_STMT is the new load which reperesent a load of the
+ store stmt, and NEW_STMT is the new load which represents a load of the
values stored. */
void
free_vuses (&(ann->operands.vuse_ops));
free_v_may_defs (&(ann->operands.v_may_def_ops));
free_v_must_defs (&(ann->operands.v_must_def_ops));
-
+
/* For each VDEF on the original statement, we want to create a
VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
statement. */
for (j = 0; j < NUM_V_MUST_DEFS (old_ops->v_must_def_ops); j++)
{
- op = V_MUST_DEF_OP (old_ops->v_must_def_ops, j);
+ op = V_MUST_DEF_RESULT (old_ops->v_must_def_ops, j);
append_vuse (op);
}