/* SSA operands management for trees.
- Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
+ Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
This file is part of GCC.
/* Bitmap obstack for our datastructures that needs to survive across
compilations of multiple functions. */
static bitmap_obstack operands_bitmap_obstack;
+
/* Set for building all the loaded symbols. */
static bitmap build_loads;
static inline int
vop_free_bucket_index (int num)
{
- gcc_assert (num > 0);
+ gcc_assert (num > 0 && NUM_VOP_FREE_BUCKETS > 16);
/* Sizes 1 through 16 use buckets 0-15. */
if (num <= 16)
return num - 1;
- /* Buckets 16 - 45 represent 17 through 256 in 8 unit chunks. */
- if (num < 256)
- return 14 + (num - 1) / 8;
- return -1;
+ /* Buckets 16 through NUM_VOP_FREE_BUCKETS - 1 represent sizes 17 and up, in 8 unit chunks. */
+ num = 14 + (num - 1) / 8;
+ if (num >= NUM_VOP_FREE_BUCKETS)
+ return -1;
+ else
+ return num;
}
static inline def_optype_p
add_def_op (tree *op, def_optype_p last)
{
- def_optype_p new;
+ def_optype_p new_def;
- new = alloc_def ();
- DEF_OP_PTR (new) = op;
- last->next = new;
- new->next = NULL;
- return new;
+ new_def = alloc_def ();
+ DEF_OP_PTR (new_def) = op;
+ last->next = new_def;
+ new_def->next = NULL;
+ return new_def;
}
static inline use_optype_p
add_use_op (tree stmt, tree *op, use_optype_p last)
{
- use_optype_p new;
+ use_optype_p new_use;
- new = alloc_use ();
- USE_OP_PTR (new)->use = op;
- link_imm_use_stmt (USE_OP_PTR (new), *op, stmt);
- last->next = new;
- new->next = NULL;
- return new;
+ new_use = alloc_use ();
+ USE_OP_PTR (new_use)->use = op;
+ link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt);
+ last->next = new_use;
+ new_use->next = NULL;
+ return new_use;
}
static inline voptype_p
add_vop (tree stmt, tree op, int num, voptype_p prev)
{
- voptype_p new;
+ voptype_p new_vop;
int x;
- new = alloc_vop (num);
+ new_vop = alloc_vop (num);
for (x = 0; x < num; x++)
{
- VUSE_OP_PTR (new, x)->prev = NULL;
- SET_VUSE_OP (new, x, op);
- VUSE_OP_PTR (new, x)->use = &new->usev.uses[x].use_var;
- link_imm_use_stmt (VUSE_OP_PTR (new, x), new->usev.uses[x].use_var, stmt);
+ VUSE_OP_PTR (new_vop, x)->prev = NULL;
+ SET_VUSE_OP (new_vop, x, op);
+ VUSE_OP_PTR (new_vop, x)->use = &new_vop->usev.uses[x].use_var;
+ link_imm_use_stmt (VUSE_OP_PTR (new_vop, x),
+ new_vop->usev.uses[x].use_var, stmt);
}
if (prev)
- prev->next = new;
- new->next = NULL;
- return new;
+ prev->next = new_vop;
+ new_vop->next = NULL;
+ return new_vop;
}
static inline voptype_p
add_vuse_op (tree stmt, tree op, int num, voptype_p last)
{
- voptype_p new = add_vop (stmt, op, num, last);
- VDEF_RESULT (new) = NULL_TREE;
- return new;
+ voptype_p new_vop = add_vop (stmt, op, num, last);
+ VDEF_RESULT (new_vop) = NULL_TREE;
+ return new_vop;
}
static inline voptype_p
add_vdef_op (tree stmt, tree op, int num, voptype_p last)
{
- voptype_p new = add_vop (stmt, op, num, last);
- VDEF_RESULT (new) = op;
- return new;
+ voptype_p new_vop = add_vop (stmt, op, num, last);
+ VDEF_RESULT (new_vop) = op;
+ return new_vop;
}
is the head of the operand list it belongs to. */
static inline struct voptype_d *
-realloc_vop (struct voptype_d *ptr, int num_elem, struct voptype_d **root)
+realloc_vop (struct voptype_d *ptr, unsigned int num_elem,
+ struct voptype_d **root)
{
- int x, lim;
+ unsigned int x, lim;
tree stmt, val;
struct voptype_d *ret, *tmp;
/* Reallocate the PTR vdef so that it has NUM_ELEM use slots. */
struct voptype_d *
-realloc_vdef (struct voptype_d *ptr, int num_elem)
+realloc_vdef (struct voptype_d *ptr, unsigned int num_elem)
{
tree val, stmt;
struct voptype_d *ret;
/* Reallocate the PTR vuse so that it has NUM_ELEM use slots. */
struct voptype_d *
-realloc_vuse (struct voptype_d *ptr, int num_elem)
+realloc_vuse (struct voptype_d *ptr, unsigned int num_elem)
{
tree stmt;
struct voptype_d *ret;
static inline void
finalize_ssa_vuse_ops (tree stmt)
{
- unsigned new_i;
- int old_i;
+ unsigned new_i, old_i;
voptype_p old_ops, last;
VEC(tree,heap) *new_ops;
stmt_ann_t ann;
SET_USE (VUSE_OP_PTR (last, (int) i), op);
VUSE_OPS (stmt) = last;
+ VEC_free (tree, heap, new_ops);
}
#ifdef ENABLE_CHECKING
get_expr_operands. FULL_REF is a tree that contains the entire
pointer dereference expression, if available, or NULL otherwise.
OFFSET and SIZE come from the memory access expression that
- generated this virtual operand. FOR_CLOBBER is true is this is
- adding a virtual operand for a call clobber. */
+ generated this virtual operand. IS_CALL_SITE is true if the
+ affected statement is a call site. */
static void
add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
tree full_ref, HOST_WIDE_INT offset,
- HOST_WIDE_INT size, bool for_clobber)
+ HOST_WIDE_INT size, bool is_call_site)
{
- VEC(tree,gc) *aliases;
+ bitmap aliases = NULL;
tree sym;
var_ann_t v_ann;
if (flags & opf_no_vops)
return;
- aliases = v_ann->may_aliases;
+ if (MTAG_P (var))
+ aliases = MTAG_ALIASES (var);
+
if (aliases == NULL)
{
if (s_ann && !gimple_aliases_computed_p (cfun))
s_ann->has_volatile_ops = true;
+
/* The variable is not aliased or it is an alias tag. */
if (flags & opf_def)
append_vdef (var);
}
else
{
- unsigned i;
+ bitmap_iterator bi;
+ unsigned int i;
tree al;
/* The variable is aliased. Add its aliases to the virtual
operands. */
- gcc_assert (VEC_length (tree, aliases) != 0);
+ gcc_assert (!bitmap_empty_p (aliases));
if (flags & opf_def)
{
bool none_added = true;
-
- for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
+ EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
{
+ al = referenced_var (i);
if (!access_can_touch_variable (full_ref, al, offset, size))
continue;
-
+
+ /* Call-clobbered tags may have non-call-clobbered
+ symbols in their alias sets. Ignore them if we are
+ adding VOPs for a call site. */
+ if (is_call_site && !is_call_clobbered (al))
+ continue;
+
none_added = false;
append_vdef (al);
}
keep the number of these bare defs we add down to the
minimum necessary, we keep track of which SMT's were used
alone in statement vdefs or VUSEs. */
- if (v_ann->is_aliased
- || none_added
+ if (none_added
|| (TREE_CODE (var) == SYMBOL_MEMORY_TAG
- && for_clobber))
+ && is_call_site))
{
append_vdef (var);
}
else
{
bool none_added = true;
- for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
+ EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
{
+ al = referenced_var (i);
if (!access_can_touch_variable (full_ref, al, offset, size))
continue;
+
+ /* Call-clobbered tags may have non-call-clobbered
+ symbols in their alias sets. Ignore them if we are
+ adding VOPs for a call site. */
+ if (is_call_site && !is_call_clobbered (al))
+ continue;
+
none_added = false;
append_vuse (al);
}
-
- /* Similarly, append a virtual uses for VAR itself, when
- it is an alias tag. */
- if (v_ann->is_aliased || none_added)
+
+ /* Even if no aliases have been added, we still need to
+ establish def-use and use-def chains, lest
+ transformations think that this is not a memory
+ reference. For an example of this scenario, see
+ testsuite/g++.dg/opt/cleanup1.C. */
+ if (none_added)
append_vuse (var);
}
}
if (v_ann->symbol_mem_tag)
add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
full_ref, offset, size, false);
- /* Aliasing information is missing; mark statement as volatile so we
- won't optimize it out too actively. */
- else if (s_ann && !gimple_aliases_computed_p (cfun)
+
+ /* Aliasing information is missing; mark statement as
+ volatile so we won't optimize it out too actively. */
+ else if (s_ann
+ && !gimple_aliases_computed_p (cfun)
&& (flags & opf_def))
s_ann->has_volatile_ops = true;
}
if (s_ann)
s_ann->makes_clobbering_call = true;
- /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
- for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
+ /* If we created .GLOBAL_VAR earlier, just use it. */
if (gimple_global_var (cfun))
{
tree var = gimple_global_var (cfun);
- add_stmt_operand (&var, s_ann, opf_def);
+ add_virtual_operand (var, s_ann, opf_def, NULL, 0, -1, true);
return;
}
if (TREE_CODE (var) == STRUCT_FIELD_TAG)
real_var = SFT_PARENT_VAR (var);
- not_read = not_read_b ? bitmap_bit_p (not_read_b,
- DECL_UID (real_var)) : false;
- not_written = not_written_b ? bitmap_bit_p (not_written_b,
- DECL_UID (real_var)) : false;
+ not_read = not_read_b
+ ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
+ : false;
+
+ not_written = not_written_b
+ ? bitmap_bit_p (not_written_b, DECL_UID (real_var))
+ : false;
gcc_assert (!unmodifiable_var_p (var));
clobber_stats.clobbered_vars++;
tree call = get_call_expr_in (stmt);
if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
{
- add_stmt_operand (&var, s_ann, opf_use);
+ add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
clobber_stats.unescapable_clobbers_avoided++;
continue;
}
{
clobber_stats.static_write_clobbers_avoided++;
if (!not_read)
- add_stmt_operand (&var, s_ann, opf_use);
+ add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
else
clobber_stats.static_read_clobbers_avoided++;
}
if (gimple_global_var (cfun))
{
tree var = gimple_global_var (cfun);
- add_stmt_operand (&var, s_ann, opf_use);
+ add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
return;
}
continue;
}
- add_stmt_operand (&var, s_ann, opf_use | opf_implicit);
+ add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
}
}
static void
get_call_expr_operands (tree stmt, tree expr)
{
- tree op;
int call_flags = call_expr_flags (expr);
+ int i, nargs;
stmt_ann_t ann = stmt_ann (stmt);
ann->references_memory = true;
}
/* Find uses in the called function. */
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use);
-
- for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
- get_expr_operands (stmt, &TREE_VALUE (op), opf_use);
+ get_expr_operands (stmt, &CALL_EXPR_FN (expr), opf_use);
+ nargs = call_expr_nargs (expr);
+ for (i = 0; i < nargs; i++)
+ get_expr_operands (stmt, &CALL_EXPR_ARG (expr, i), opf_use);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
+ get_expr_operands (stmt, &CALL_EXPR_STATIC_CHAIN (expr), opf_use);
}
get_expr_operands (tree stmt, tree *expr_p, int flags)
{
enum tree_code code;
- enum tree_code_class class;
+ enum tree_code_class codeclass;
tree expr = *expr_p;
stmt_ann_t s_ann = stmt_ann (stmt);
return;
code = TREE_CODE (expr);
- class = TREE_CODE_CLASS (code);
+ codeclass = TREE_CODE_CLASS (code);
switch (code)
{
return;
}
+ case CHANGE_DYNAMIC_TYPE_EXPR:
+ get_expr_operands (stmt, &CHANGE_DYNAMIC_TYPE_LOCATION (expr), opf_use);
+ return;
+
case BLOCK:
case FUNCTION_DECL:
case EXC_PTR_EXPR:
return;
default:
- if (class == tcc_unary)
+ if (codeclass == tcc_unary)
goto do_unary;
- if (class == tcc_binary || class == tcc_comparison)
+ if (codeclass == tcc_binary || codeclass == tcc_comparison)
goto do_binary;
- if (class == tcc_constant || class == tcc_type)
+ if (codeclass == tcc_constant || codeclass == tcc_type)
return;
}
void
copy_virtual_operands (tree dest, tree src)
{
- int i, n;
+ unsigned int i, n;
voptype_p src_vuses, dest_vuses;
voptype_p src_vdefs, dest_vdefs;
struct voptype_d vuse;
if (TREE_CODE (stmt) == PHI_NODE)
return;
- buf = xmalloc (sizeof *buf);
+ buf = XNEW (struct scb_d);
memset (buf, 0, sizeof *buf);
buf->stmt_p = stmt_p;
return stmt_ann (stmt)->references_memory;
}
-
-
-/* Return the memory partition tag (MPT) associated with memory
- symbol SYM. From a correctness standpoint, memory partitions can
- be assigned in any arbitrary fashion as long as this rule is
- observed: Given two memory partitions MPT.i and MPT.j, they must
- not contain symbols in common.
-
- Memory partitions are used when putting the program into Memory-SSA
- form. In particular, in Memory-SSA PHI nodes are not computed for
- individual memory symbols. They are computed for memory
- partitions. This reduces the amount of PHI nodes in the SSA graph
- at the expense of precision (i.e., it makes unrelated stores affect
- each other).
-
- However, it is possible to increase precision by changing this
- partitioning scheme. For instance, if the partitioning scheme is
- such that get_mpt_for is the identity function (that is,
- get_mpt_for (s) = s), this will result in ultimate precision at the
- expense of huge SSA webs.
-
- At the other extreme, a partitioning scheme that groups all the
- symbols in the same set results in minimal SSA webs and almost
- total loss of precision. */
-
-tree
-get_mpt_for (tree sym)
-{
- tree mpt;
-
- /* Don't create a new tag unnecessarily. */
- mpt = memory_partition (sym);
- if (mpt == NULL_TREE)
- {
- mpt = create_tag_raw (MEMORY_PARTITION_TAG, TREE_TYPE (sym), "MPT");
- TREE_ADDRESSABLE (mpt) = 0;
- MTAG_GLOBAL (mpt) = 1;
- add_referenced_var (mpt);
- VEC_safe_push (tree, heap, gimple_ssa_operands (cfun)->mpt_table, mpt);
- MPT_SYMBOLS (mpt) = BITMAP_ALLOC (&operands_bitmap_obstack);
- set_memory_partition (sym, mpt);
- }
-
- return mpt;
-}
-
-
-/* Dump memory partition information to FILE. */
-
-void
-dump_memory_partitions (FILE *file)
-{
- unsigned i, npart;
- unsigned long nsyms;
- tree mpt;
-
- fprintf (file, "\nMemory partitions\n\n");
- for (i = 0, npart = 0, nsyms = 0;
- VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, i, mpt);
- i++)
- {
- if (mpt)
- {
- bitmap syms = MPT_SYMBOLS (mpt);
- unsigned long n = bitmap_count_bits (syms);
-
- fprintf (file, "#%u: ", i);
- print_generic_expr (file, mpt, 0);
- fprintf (file, ": %lu elements: ", n);
- dump_decl_set (file, syms);
- npart++;
- nsyms += n;
- }
- }
-
- fprintf (file, "\n%u memory partitions holding %lu symbols\n", npart, nsyms);
-}
-
-
-/* Dump memory partition information to stderr. */
-
-void
-debug_memory_partitions (void)
-{
- dump_memory_partitions (stderr);
-}