#include "ggc.h"
#include "timevar.h"
#include "langhooks.h"
-#include "ipa-reference.h"
+#include "diagnostic-core.h"
+
/* This file contains the code required to manage the operands cache of the
SSA optimizer. For every stmt, we maintain an operand cache in the stmt
sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
/* Mark statements with volatile operands. */
- if (TREE_THIS_VOLATILE (sym))
+ if (!(flags & opf_no_vops)
+ && TREE_THIS_VOLATILE (sym))
gimple_set_has_volatile_ops (stmt, true);
if (is_gimple_reg (sym))
{
tree *pptr = &TREE_OPERAND (expr, 0);
- if (TREE_THIS_VOLATILE (expr))
+ if (!(flags & opf_no_vops)
+ && TREE_THIS_VOLATILE (expr))
gimple_set_has_volatile_ops (stmt, true);
/* Add the VOP. */
static void
get_tmr_operands (gimple stmt, tree expr, int flags)
{
- if (TREE_THIS_VOLATILE (expr))
+ if (!(flags & opf_no_vops)
+ && TREE_THIS_VOLATILE (expr))
gimple_set_has_volatile_ops (stmt, true);
/* First record the real operands. */
}
/* Clobber all memory and addressable symbols for asm ("" : : : "memory"); */
- for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
- {
- tree link = gimple_asm_clobber_op (stmt, i);
- if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
- {
- add_virtual_operand (stmt, opf_def);
- break;
- }
- }
+ if (gimple_asm_clobbers_memory_p (stmt))
+ add_virtual_operand (stmt, opf_def);
}
case REALPART_EXPR:
case IMAGPART_EXPR:
{
- if (TREE_THIS_VOLATILE (expr))
+ if (!(flags & opf_no_vops)
+ && TREE_THIS_VOLATILE (expr))
gimple_set_has_volatile_ops (stmt, true);
get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
if (code == COMPONENT_REF)
{
- if (TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
+ if (!(flags & opf_no_vops)
+ && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
gimple_set_has_volatile_ops (stmt, true);
get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
}
case COND_EXPR:
case VEC_COND_EXPR:
+ case VEC_PERM_EXPR:
get_expr_operands (stmt, &TREE_OPERAND (expr, 0), uflags);
get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
constructor_elt *ce;
unsigned HOST_WIDE_INT idx;
+ /* A volatile constructor is actually TREE_CLOBBER_P, transfer
+ the volatility to the statement, don't use TREE_CLOBBER_P for
+ mirroring the other uses of THIS_VOLATILE in this file. */
+ if (!(flags & opf_no_vops)
+ && TREE_THIS_VOLATILE (expr))
+ gimple_set_has_volatile_ops (stmt, true);
+
for (idx = 0;
VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
idx++)
}
case BIT_FIELD_REF:
- if (TREE_THIS_VOLATILE (expr))
+ if (!(flags & opf_no_vops)
+ && TREE_THIS_VOLATILE (expr))
gimple_set_has_volatile_ops (stmt, true);
/* FALLTHRU */
- case TRUTH_NOT_EXPR:
case VIEW_CONVERT_EXPR:
do_unary:
get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
return;
- case TRUTH_AND_EXPR:
- case TRUTH_OR_EXPR:
- case TRUTH_XOR_EXPR:
case COMPOUND_EXPR:
case OBJ_TYPE_REF:
case ASSERT_EXPR:
finalize_ssa_stmt_operands (stmt);
}
+/* Verifies SSA statement operands.  Re-computes the operands of STMT
+   into the scratch build vectors (build_vdef, build_vuse, build_uses,
+   build_defs) and compares them against the operand cache stored on
+   STMT.  Returns true after reporting via error () on any mismatch;
+   returns false when the cache is up-to-date.  */
+
+DEBUG_FUNCTION bool
+verify_ssa_operands (gimple stmt)
+{
+  use_operand_p use_p;
+  def_operand_p def_p;
+  ssa_op_iter iter;
+  unsigned i;
+  tree use, def;
+  /* Remember the cached volatility; parse_ssa_operands below
+     recomputes the flag from scratch so we can compare at the end.  */
+  bool volatile_p = gimple_has_volatile_ops (stmt);
+
+  /* build_ssa_operands w/o finalizing them.  */
+  gimple_set_has_volatile_ops (stmt, false);
+  start_ssa_stmt_operands ();
+  parse_ssa_operands (stmt);
+
+  /* Now verify the built operands are the same as present in STMT.
+     The build vectors record the symbol, so strip SSA names down to
+     their underlying variable before comparing.  */
+  def = gimple_vdef (stmt);
+  if (def
+      && TREE_CODE (def) == SSA_NAME)
+    def = SSA_NAME_VAR (def);
+  if (build_vdef != def)
+    {
+      error ("virtual definition of statement not up-to-date");
+      return true;
+    }
+  if (gimple_vdef (stmt)
+      && ((def_p = gimple_vdef_op (stmt)) == NULL_DEF_OPERAND_P
+	  || DEF_FROM_PTR (def_p) != gimple_vdef (stmt)))
+    {
+      error ("virtual def operand missing for stmt");
+      return true;
+    }
+
+  use = gimple_vuse (stmt);
+  if (use
+      && TREE_CODE (use) == SSA_NAME)
+    use = SSA_NAME_VAR (use);
+  if (build_vuse != use)
+    {
+      error ("virtual use of statement not up-to-date");
+      return true;
+    }
+  if (gimple_vuse (stmt)
+      && ((use_p = gimple_vuse_op (stmt)) == NULL_USE_OPERAND_P
+	  || USE_FROM_PTR (use_p) != gimple_vuse (stmt)))
+    {
+      error ("virtual use operand missing for stmt");
+      return true;
+    }
+
+  /* Match each cached use against the freshly built uses, clearing
+     matched build entries to NULL_TREE so that duplicates and
+     leftovers can be detected.  build_uses holds use pointers cast
+     to tree, hence the (tree *) casts.  */
+  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
+    {
+      FOR_EACH_VEC_ELT (tree, build_uses, i, use)
+	{
+	  if (use_p->use == (tree *)use)
+	    {
+	      VEC_replace (tree, build_uses, i, NULL_TREE);
+	      break;
+	    }
+	}
+      /* No break above leaves I equal to the vector length.  */
+      if (i == VEC_length (tree, build_uses))
+	{
+	  error ("excess use operand for stmt");
+	  debug_generic_expr (USE_FROM_PTR (use_p));
+	  return true;
+	}
+    }
+  /* Any entry not cleared above was built but is absent from the
+     operand cache.  */
+  FOR_EACH_VEC_ELT (tree, build_uses, i, use)
+    if (use != NULL_TREE)
+      {
+	error ("use operand missing for stmt");
+	debug_generic_expr (*(tree *)use);
+	return true;
+      }
+
+  /* Likewise cross off the non-virtual defs.  */
+  FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, iter, SSA_OP_DEF)
+    {
+      FOR_EACH_VEC_ELT (tree, build_defs, i, def)
+	{
+	  if (def_p == (tree *)def)
+	    {
+	      VEC_replace (tree, build_defs, i, NULL_TREE);
+	      break;
+	    }
+	}
+      if (i == VEC_length (tree, build_defs))
+	{
+	  error ("excess def operand for stmt");
+	  debug_generic_expr (DEF_FROM_PTR (def_p));
+	  return true;
+	}
+    }
+  FOR_EACH_VEC_ELT (tree, build_defs, i, def)
+    if (def != NULL_TREE)
+      {
+	error ("def operand missing for stmt");
+	debug_generic_expr (*(tree *)def);
+	return true;
+      }
+
+  /* The recomputed volatility must agree with what the cache said.  */
+  if (gimple_has_volatile_ops (stmt) != volatile_p)
+    {
+      error ("stmt volatile flag not up-to-date");
+      return true;
+    }
+
+  /* NOTE(review): cleanup_build_arrays () and the restoration of the
+     stmt's volatile flag are skipped on the error returns above,
+     leaving the build vectors populated — presumably acceptable
+     because a failed verification aborts compilation; confirm.  */
+  cleanup_build_arrays ();
+  return false;
+}
+
/* Releases the operands of STMT back to their freelists, and clears
the stmt operand lists. */
/* If the operand cache is active, attempt to preserve the relative
positions of these two operands in their respective immediate use
- lists. */
+ lists by adjusting their use pointer to point to the new
+ operand position. */
if (ssa_operands_active () && op0 != op1)
{
use_optype_p use0, use1, ptr;
break;
}
- /* If both uses don't have operand entries, there isn't much we can do
- at this point. Presumably we don't need to worry about it. */
- if (use0 && use1)
- {
- tree *tmp = USE_OP_PTR (use1)->use;
- USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
- USE_OP_PTR (use0)->use = tmp;
- }
+ /* And adjust their location to point to the new position of the
+ operand. */
+ if (use0)
+ USE_OP_PTR (use0)->use = exp1;
+ if (use1)
+ USE_OP_PTR (use1)->use = exp0;
}
/* Now swap the data. */