/* SSA operands management for trees.
- Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
+ Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
Free Software Foundation, Inc.
This file is part of GCC.
#include "tree.h"
#include "flags.h"
#include "function.h"
-#include "diagnostic.h"
+#include "tree-pretty-print.h"
+#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
-#include "toplev.h"
#include "langhooks.h"
-#include "ipa-reference.h"
+#include "diagnostic-core.h"
-/* This file contains the code required to manage the operands cache of the
- SSA optimizer. For every stmt, we maintain an operand cache in the stmt
- annotation. This cache contains operands that will be of interest to
- optimizers and other passes wishing to manipulate the IL.
- The operand type are broken up into REAL and VIRTUAL operands. The real
- operands are represented as pointers into the stmt's operand tree. Thus
+/* This file contains the code required to manage the operands cache of the
+ SSA optimizer. For every stmt, we maintain an operand cache in the stmt
+ annotation. This cache contains operands that will be of interest to
+ optimizers and other passes wishing to manipulate the IL.
+
+ The operand types are broken up into REAL and VIRTUAL operands. The real
+ operands are represented as pointers into the stmt's operand tree. Thus
any manipulation of the real operands will be reflected in the actual tree.
- Virtual operands are represented solely in the cache, although the base
- variable for the SSA_NAME may, or may not occur in the stmt's tree.
+ Virtual operands are represented solely in the cache, although the base
+ variable for the SSA_NAME may, or may not occur in the stmt's tree.
Manipulation of the virtual operands will not be reflected in the stmt tree.
- The routines in this file are concerned with creating this operand cache
+ The routines in this file are concerned with creating this operand cache
from a stmt tree.
- The operand tree is the parsed by the various get_* routines which look
- through the stmt tree for the occurrence of operands which may be of
- interest, and calls are made to the append_* routines whenever one is
- found. There are 4 of these routines, each representing one of the
+ The operand tree is then parsed by the various get_* routines which look
+ through the stmt tree for the occurrence of operands which may be of
+ interest, and calls are made to the append_* routines whenever one is
+ found. There are 4 of these routines, each representing one of the
4 types of operands. Defs, Uses, Virtual Uses, and Virtual May Defs.
- The append_* routines check for duplication, and simply keep a list of
+ The append_* routines check for duplication, and simply keep a list of
unique objects for each operand type in the build_* extendable vectors.
- Once the stmt tree is completely parsed, the finalize_ssa_operands()
- routine is called, which proceeds to perform the finalization routine
+ Once the stmt tree is completely parsed, the finalize_ssa_operands()
+ routine is called, which proceeds to perform the finalization routine
on each of the 4 operand vectors which have been built up.
- If the stmt had a previous operand cache, the finalization routines
- attempt to match up the new operands with the old ones. If it's a perfect
- match, the old vector is simply reused. If it isn't a perfect match, then
- a new vector is created and the new operands are placed there. For
- virtual operands, if the previous cache had SSA_NAME version of a
- variable, and that same variable occurs in the same operands cache, then
+ If the stmt had a previous operand cache, the finalization routines
+ attempt to match up the new operands with the old ones. If it's a perfect
+ match, the old vector is simply reused. If it isn't a perfect match, then
+ a new vector is created and the new operands are placed there. For
+ virtual operands, if the previous cache had SSA_NAME version of a
+ variable, and that same variable occurs in the same operands cache, then
the new cache vector will also get the same SSA_NAME.
i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
/* Structure storing statistics on how many call clobbers we have, and
how many where avoided. */
-static struct
+static struct
{
/* Number of call-clobbered ops we attempt to add to calls in
add_call_clobbered_mem_symbols. */
/* Number of reads (VUSEs) avoided by using not_read information. */
unsigned int static_read_clobbers_avoided;
-
+
/* Number of write-clobbers avoided because the variable can't escape to
this call. */
unsigned int unescapable_clobbers_avoided;
/* By default, operands are loaded. */
#define opf_use 0
-/* Operand is the target of an assignment expression or a
+/* Operand is the target of an assignment expression or a
call-clobbered variable. */
#define opf_def (1 << 0)
clobbering sites like function calls or ASM_EXPRs. */
#define opf_implicit (1 << 2)
+/* Operand is in a place where address-taken does not imply addressable. */
+#define opf_non_addressable (1 << 3)
+
+/* Operand is in a place where opf_non_addressable does not apply. */
+#define opf_not_non_addressable (1 << 4)
+
/* Array for building all the def operands. */
static VEC(tree,heap) *build_defs;
/* The built VUSE operand. */
static tree build_vuse;
-/* Bitmap obstack for our datastructures that needs to survive across
+/* Bitmap obstack for our data structures that need to survive across
compilations of multiple functions. */
static bitmap_obstack operands_bitmap_obstack;
return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
}
-
+
/* Create the VOP variable, an artificial global variable to act as a
representative of all of the virtual operands FUD chain. */
In 1k we can fit 25 use operands (or 63 def operands) on a host with
8 byte pointers, that would be 10 statements each with 1 def and 2
uses. */
-
+
#define OP_SIZE_INIT 0
#define OP_SIZE_1 (1024 - sizeof (void *))
#define OP_SIZE_2 (1024 * 4 - sizeof (void *))
/* Return memory for an operand of size SIZE. */
-
+
static inline void *
ssa_operand_alloc (unsigned size)
{
gcc_unreachable ();
}
- ptr = (struct ssa_operand_memory_d *)
- ggc_alloc (sizeof (void *)
- + gimple_ssa_operands (cfun)->ssa_operand_mem_size);
+
+ ptr = ggc_alloc_ssa_operand_memory_d (sizeof (void *)
+ + gimple_ssa_operands (cfun)->ssa_operand_mem_size);
+
ptr->next = gimple_ssa_operands (cfun)->operand_memory;
gimple_ssa_operands (cfun)->operand_memory = ptr;
gimple_ssa_operands (cfun)->operand_memory_index = 0;
/* Adds OP to the list of defs after LAST. */
-static inline def_optype_p
+static inline def_optype_p
add_def_op (tree *op, def_optype_p last)
{
def_optype_p new_def;
/* Now create nodes for all the new nodes. */
for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
- last = add_use_op (stmt,
- (tree *) VEC_index (tree, build_uses, new_i),
+ last = add_use_op (stmt,
+ (tree *) VEC_index (tree, build_uses, new_i),
last);
/* Now set the stmt's operands. */
/* Finalize all the build vectors, fill the new ones into INFO. */
-
+
static inline void
finalize_ssa_stmt_operands (gimple stmt)
{
add_stmt_operand (tree *var_p, gimple stmt, int flags)
{
tree var, sym;
- var_ann_t v_ann;
gcc_assert (SSA_VAR_P (*var_p));
var = *var_p;
sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
- v_ann = var_ann (sym);
/* Mark statements with volatile operands. */
- if (TREE_THIS_VOLATILE (sym))
+ if (!(flags & opf_no_vops)
+ && TREE_THIS_VOLATILE (sym))
gimple_set_has_volatile_ops (stmt, true);
if (is_gimple_reg (sym))
be referenced using pointer arithmetic. See PR 21407 and the
ensuing mailing list discussion. */
var = get_base_address (ref);
- if (var && DECL_P (var))
- TREE_ADDRESSABLE (var) = 1;
+ if (var)
+ {
+ if (DECL_P (var))
+ TREE_ADDRESSABLE (var) = 1;
+ else if (TREE_CODE (var) == MEM_REF
+ && TREE_CODE (TREE_OPERAND (var, 0)) == ADDR_EXPR
+ && DECL_P (TREE_OPERAND (TREE_OPERAND (var, 0), 0)))
+ TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (var, 0), 0)) = 1;
+ }
}
-/* A subroutine of get_expr_operands to handle INDIRECT_REF,
- ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.
+/* A subroutine of get_expr_operands to handle MEM_REF.
- STMT is the statement being processed, EXPR is the INDIRECT_REF
+ STMT is the statement being processed, EXPR is the MEM_REF
that got us here.
-
+
FLAGS is as in get_expr_operands.
RECURSE_ON_BASE should be set to true if we want to continue
{
tree *pptr = &TREE_OPERAND (expr, 0);
- if (TREE_THIS_VOLATILE (expr))
+ if (!(flags & opf_no_vops)
+ && TREE_THIS_VOLATILE (expr))
gimple_set_has_volatile_ops (stmt, true);
/* Add the VOP. */
/* If requested, add a USE operand for the base pointer. */
if (recurse_on_base)
get_expr_operands (stmt, pptr,
- opf_use | (flags & opf_no_vops));
+ opf_non_addressable | opf_use
+ | (flags & (opf_no_vops|opf_not_non_addressable)));
}
static void
get_tmr_operands (gimple stmt, tree expr, int flags)
{
+ if (!(flags & opf_no_vops)
+ && TREE_THIS_VOLATILE (expr))
+ gimple_set_has_volatile_ops (stmt, true);
+
/* First record the real operands. */
get_expr_operands (stmt, &TMR_BASE (expr), opf_use | (flags & opf_no_vops));
get_expr_operands (stmt, &TMR_INDEX (expr), opf_use | (flags & opf_no_vops));
-
- if (TMR_SYMBOL (expr))
- mark_address_taken (TMR_SYMBOL (expr));
+ get_expr_operands (stmt, &TMR_INDEX2 (expr), opf_use | (flags & opf_no_vops));
add_virtual_operand (stmt, flags);
}
call-clobbered. */
if (!(call_flags & ECF_NOVOPS))
{
- /* A 'pure' or a 'const' function never call-clobbers anything.
- A 'noreturn' function might, but since we don't return anyway
- there is no point in recording that. */
+ /* A 'pure' or a 'const' function never call-clobbers anything.
+ A 'noreturn' function might, but since we don't return anyway
+ there is no point in recording that. */
if (!(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
add_virtual_operand (stmt, opf_def);
else if (!(call_flags & ECF_CONST))
/* Memory operands are addressable. Note that STMT needs the
address of this operand. */
if (!allows_reg && allows_mem)
- {
- tree t = get_base_address (TREE_VALUE (link));
- if (t && DECL_P (t))
- mark_address_taken (t);
- }
+ mark_address_taken (TREE_VALUE (link));
- get_expr_operands (stmt, &TREE_VALUE (link), opf_def);
+ get_expr_operands (stmt, &TREE_VALUE (link), opf_def | opf_not_non_addressable);
}
/* Gather all input operands. */
/* Memory operands are addressable. Note that STMT needs the
address of this operand. */
if (!allows_reg && allows_mem)
- {
- tree t = get_base_address (TREE_VALUE (link));
- if (t && DECL_P (t))
- mark_address_taken (t);
- }
+ mark_address_taken (TREE_VALUE (link));
- get_expr_operands (stmt, &TREE_VALUE (link), 0);
+ get_expr_operands (stmt, &TREE_VALUE (link), opf_not_non_addressable);
}
/* Clobber all memory and addressable symbols for asm ("" : : : "memory"); */
- for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
- {
- tree link = gimple_asm_clobber_op (stmt, i);
- if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
- {
- add_virtual_operand (stmt, opf_def);
- break;
- }
- }
+ if (gimple_asm_clobbers_memory_p (stmt))
+ add_virtual_operand (stmt, opf_def);
}
reference to it, but the fact that the statement takes its
address will be of interest to some passes (e.g. alias
resolution). */
- if (!is_gimple_debug (stmt))
+ if ((!(flags & opf_non_addressable)
+ || (flags & opf_not_non_addressable))
+ && !is_gimple_debug (stmt))
mark_address_taken (TREE_OPERAND (expr, 0));
/* If the address is invariant, there may be no interesting
here are ARRAY_REF indices which will always be real operands
(GIMPLE does not allow non-registers as array indices). */
flags |= opf_no_vops;
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
+ flags | opf_not_non_addressable);
return;
case SSA_NAME:
add_stmt_operand (expr_p, stmt, flags);
return;
- case MISALIGNED_INDIRECT_REF:
- get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
- /* fall through */
+ case DEBUG_EXPR_DECL:
+ gcc_assert (gimple_debug_bind_p (stmt));
+ return;
- case ALIGN_INDIRECT_REF:
- case INDIRECT_REF:
+ case MEM_REF:
get_indirect_ref_operands (stmt, expr, flags, true);
return;
case REALPART_EXPR:
case IMAGPART_EXPR:
{
- if (TREE_THIS_VOLATILE (expr))
+ if (!(flags & opf_no_vops)
+ && TREE_THIS_VOLATILE (expr))
gimple_set_has_volatile_ops (stmt, true);
get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
-
+
if (code == COMPONENT_REF)
{
- if (TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
+ if (!(flags & opf_no_vops)
+ && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
gimple_set_has_volatile_ops (stmt, true);
get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
}
case COND_EXPR:
case VEC_COND_EXPR:
+ case VEC_PERM_EXPR:
get_expr_operands (stmt, &TREE_OPERAND (expr, 0), uflags);
get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
constructor_elt *ce;
unsigned HOST_WIDE_INT idx;
+ /* A volatile constructor is actually TREE_CLOBBER_P, transfer
+ the volatility to the statement, don't use TREE_CLOBBER_P for
+ mirroring the other uses of THIS_VOLATILE in this file. */
+ if (!(flags & opf_no_vops)
+ && TREE_THIS_VOLATILE (expr))
+ gimple_set_has_volatile_ops (stmt, true);
+
for (idx = 0;
VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
idx++)
}
case BIT_FIELD_REF:
- if (TREE_THIS_VOLATILE (expr))
+ if (!(flags & opf_no_vops)
+ && TREE_THIS_VOLATILE (expr))
gimple_set_has_volatile_ops (stmt, true);
/* FALLTHRU */
- case TRUTH_NOT_EXPR:
case VIEW_CONVERT_EXPR:
do_unary:
get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
return;
- case TRUTH_AND_EXPR:
- case TRUTH_OR_EXPR:
- case TRUTH_XOR_EXPR:
case COMPOUND_EXPR:
case OBJ_TYPE_REF:
case ASSERT_EXPR:
case DOT_PROD_EXPR:
case REALIGN_LOAD_EXPR:
+ case WIDEN_MULT_PLUS_EXPR:
+ case WIDEN_MULT_MINUS_EXPR:
+ case FMA_EXPR:
{
get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
- return;
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
+ return;
}
case FUNCTION_DECL:
parse_ssa_operands (gimple stmt)
{
enum gimple_code code = gimple_code (stmt);
+ size_t i, n, start = 0;
- if (code == GIMPLE_ASM)
- get_asm_expr_operands (stmt);
- else if (is_gimple_debug (stmt))
+ switch (code)
{
+ case GIMPLE_ASM:
+ get_asm_expr_operands (stmt);
+ break;
+
+ case GIMPLE_TRANSACTION:
+ /* The start of a transaction is a memory barrier. */
+ add_virtual_operand (stmt, opf_def | opf_use);
+ break;
+
+ case GIMPLE_DEBUG:
if (gimple_debug_bind_p (stmt)
&& gimple_debug_bind_has_value_p (stmt))
get_expr_operands (stmt, gimple_debug_bind_get_value_ptr (stmt),
opf_use | opf_no_vops);
- }
- else
- {
- size_t i, start = 0;
-
- if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
- {
- get_expr_operands (stmt, gimple_op_ptr (stmt, 0), opf_def);
- start = 1;
- }
+ break;
- for (i = start; i < gimple_num_ops (stmt); i++)
- get_expr_operands (stmt, gimple_op_ptr (stmt, i), opf_use);
+ case GIMPLE_RETURN:
+ append_vuse (gimple_vop (cfun));
+ goto do_default;
+ case GIMPLE_CALL:
/* Add call-clobbered operands, if needed. */
- if (code == GIMPLE_CALL)
- maybe_add_call_vops (stmt);
+ maybe_add_call_vops (stmt);
+ /* FALLTHRU */
+
+ case GIMPLE_ASSIGN:
+ get_expr_operands (stmt, gimple_op_ptr (stmt, 0), opf_def);
+ start = 1;
+ /* FALLTHRU */
+
+ default:
+ do_default:
+ n = gimple_num_ops (stmt);
+ for (i = start; i < n; i++)
+ get_expr_operands (stmt, gimple_op_ptr (stmt, i), opf_use);
+ break;
}
}
finalize_ssa_stmt_operands (stmt);
}
+/* Verifies SSA statement operands. */
+
+/* Rebuilds the operand cache for STMT from scratch (into the build_*
+   scratch vectors, without finalizing) and compares the result against
+   the operands currently recorded on STMT.  Returns true and emits an
+   error on any mismatch, false if everything is up-to-date.
+   NOTE(review): on the error paths the scratch vectors are not released
+   and the recomputed volatile flag is left on STMT -- presumably
+   acceptable because callers treat a true return as fatal.  */
+
+DEBUG_FUNCTION bool
+verify_ssa_operands (gimple stmt)
+{
+ use_operand_p use_p;
+ def_operand_p def_p;
+ ssa_op_iter iter;
+ unsigned i;
+ tree use, def;
+ bool volatile_p = gimple_has_volatile_ops (stmt);
+
+ /* build_ssa_operands w/o finalizing them. */
+ gimple_set_has_volatile_ops (stmt, false);
+ start_ssa_stmt_operands ();
+ parse_ssa_operands (stmt);
+
+ /* Now verify the built operands are the same as present in STMT. */
+ /* Compare base variables rather than SSA names: the freshly parsed
+    build_vdef/build_vuse hold unversioned symbols.  */
+ def = gimple_vdef (stmt);
+ if (def
+ && TREE_CODE (def) == SSA_NAME)
+ def = SSA_NAME_VAR (def);
+ if (build_vdef != def)
+ {
+ error ("virtual definition of statement not up-to-date");
+ return true;
+ }
+ if (gimple_vdef (stmt)
+ && ((def_p = gimple_vdef_op (stmt)) == NULL_DEF_OPERAND_P
+ || DEF_FROM_PTR (def_p) != gimple_vdef (stmt)))
+ {
+ error ("virtual def operand missing for stmt");
+ return true;
+ }
+
+ use = gimple_vuse (stmt);
+ if (use
+ && TREE_CODE (use) == SSA_NAME)
+ use = SSA_NAME_VAR (use);
+ if (build_vuse != use)
+ {
+ error ("virtual use of statement not up-to-date");
+ return true;
+ }
+ if (gimple_vuse (stmt)
+ && ((use_p = gimple_vuse_op (stmt)) == NULL_USE_OPERAND_P
+ || USE_FROM_PTR (use_p) != gimple_vuse (stmt)))
+ {
+ error ("virtual use operand missing for stmt");
+ return true;
+ }
+
+ /* Cross off each immediate use recorded on STMT against the freshly
+    built build_uses vector, nulling out matched entries; anything left
+    non-NULL afterwards is an operand missing from STMT.  Note that
+    build_uses stores 'tree *' operand addresses cast to tree, hence
+    the (tree *) casts below.  */
+ FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
+ {
+ FOR_EACH_VEC_ELT (tree, build_uses, i, use)
+ {
+ if (use_p->use == (tree *)use)
+ {
+ VEC_replace (tree, build_uses, i, NULL_TREE);
+ break;
+ }
+ }
+ /* Loop ran off the end without a match: STMT records a use the
+    rebuild did not produce.  */
+ if (i == VEC_length (tree, build_uses))
+ {
+ error ("excess use operand for stmt");
+ debug_generic_expr (USE_FROM_PTR (use_p));
+ return true;
+ }
+ }
+ FOR_EACH_VEC_ELT (tree, build_uses, i, use)
+ if (use != NULL_TREE)
+ {
+ error ("use operand missing for stmt");
+ debug_generic_expr (*(tree *)use);
+ return true;
+ }
+
+ /* Likewise for defs: build_defs also holds 'tree *' addresses stored
+    as tree.  */
+ FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, iter, SSA_OP_DEF)
+ {
+ FOR_EACH_VEC_ELT (tree, build_defs, i, def)
+ {
+ if (def_p == (tree *)def)
+ {
+ VEC_replace (tree, build_defs, i, NULL_TREE);
+ break;
+ }
+ }
+ if (i == VEC_length (tree, build_defs))
+ {
+ error ("excess def operand for stmt");
+ debug_generic_expr (DEF_FROM_PTR (def_p));
+ return true;
+ }
+ }
+ FOR_EACH_VEC_ELT (tree, build_defs, i, def)
+ if (def != NULL_TREE)
+ {
+ error ("def operand missing for stmt");
+ debug_generic_expr (*(tree *)def);
+ return true;
+ }
+
+ /* The volatile flag was cleared and recomputed by the re-parse above;
+    it must agree with what STMT had on entry.  */
+ if (gimple_has_volatile_ops (stmt) != volatile_p)
+ {
+ error ("stmt volatile flag not up-to-date");
+ return true;
+ }
+
+ /* Success: release the scratch build vectors.  */
+ cleanup_build_arrays ();
+ return false;
+}
+
/* Releases the operands of STMT back to their freelists, and clears
the stmt operand lists. */
timevar_push (TV_TREE_OPS);
+ /* If the stmt is a noreturn call queue it to be processed by
+ split_bbs_on_noreturn_calls during cfg cleanup. */
+ if (is_gimple_call (stmt)
+ && gimple_call_noreturn_p (stmt))
+ VEC_safe_push (gimple, gc, MODIFIED_NORETURN_CALLS (cfun), stmt);
+
gcc_assert (gimple_modified_p (stmt));
build_ssa_operands (stmt);
gimple_set_modified (stmt, false);
/* If the operand cache is active, attempt to preserve the relative
positions of these two operands in their respective immediate use
- lists. */
+ lists by adjusting their use pointer to point to the new
+ operand position. */
if (ssa_operands_active () && op0 != op1)
{
use_optype_p use0, use1, ptr;
break;
}
- /* If both uses don't have operand entries, there isn't much we can do
- at this point. Presumably we don't need to worry about it. */
- if (use0 && use1)
- {
- tree *tmp = USE_OP_PTR (use1)->use;
- USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
- USE_OP_PTR (use0)->use = tmp;
- }
+ /* And adjust their location to point to the new position of the
+ operand. */
+ if (use0)
+ USE_OP_PTR (use0)->use = exp1;
+ if (use1)
+ USE_OP_PTR (use1)->use = exp0;
}
/* Now swap the data. */
/* Scan the immediate_use list for VAR making sure its linked properly.
Return TRUE if there is a problem and emit an error message to F. */
-bool
+DEBUG_FUNCTION bool
verify_imm_links (FILE *f, tree var)
{
use_operand_p ptr, prev, list;
{
if (prev != ptr->prev)
goto error;
-
+
if (ptr->use == NULL)
goto error; /* 2 roots, or SAFE guard node. */
else if (*(ptr->use) != var)
fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt);
print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
}
- fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
+ fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
(void *)ptr->use);
print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
fprintf(f, "\n");
/* Dump def-use edges on stderr. */
-void
+DEBUG_FUNCTION void
debug_immediate_uses (void)
{
dump_immediate_uses (stderr);
/* Dump def-use edges on stderr. */
-void
+DEBUG_FUNCTION void
debug_immediate_uses_for (tree var)
{
dump_immediate_uses_for (stderr, var);