#include "tree.h"
#include "flags.h"
#include "function.h"
-#include "diagnostic.h"
+#include "tree-pretty-print.h"
+#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
-#include "toplev.h"
#include "langhooks.h"
-#include "ipa-reference.h"
+#include "diagnostic-core.h"
+
/* This file contains the code required to manage the operands cache of the
SSA optimizer. For every stmt, we maintain an operand cache in the stmt
clobbering sites like function calls or ASM_EXPRs. */
#define opf_implicit (1 << 2)
+/* Operand is in a place where address-taken does not imply addressable. */
+#define opf_non_addressable (1 << 3)
+
+/* Operand is in a place where opf_non_addressable does not apply. */
+#define opf_not_non_addressable (1 << 4)
+
/* Array for building all the def operands. */
static VEC(tree,heap) *build_defs;
gcc_unreachable ();
}
- ptr = (struct ssa_operand_memory_d *)
- ggc_alloc (sizeof (void *)
- + gimple_ssa_operands (cfun)->ssa_operand_mem_size);
+
+ ptr = ggc_alloc_ssa_operand_memory_d (sizeof (void *)
+ + gimple_ssa_operands (cfun)->ssa_operand_mem_size);
+
ptr->next = gimple_ssa_operands (cfun)->operand_memory;
gimple_ssa_operands (cfun)->operand_memory = ptr;
gimple_ssa_operands (cfun)->operand_memory_index = 0;
sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
/* Mark statements with volatile operands. */
- if (TREE_THIS_VOLATILE (sym))
+ if (!(flags & opf_no_vops)
+ && TREE_THIS_VOLATILE (sym))
gimple_set_has_volatile_ops (stmt, true);
if (is_gimple_reg (sym))
be referenced using pointer arithmetic. See PR 21407 and the
ensuing mailing list discussion. */
var = get_base_address (ref);
- if (var && DECL_P (var))
- TREE_ADDRESSABLE (var) = 1;
+ if (var)
+ {
+ if (DECL_P (var))
+ TREE_ADDRESSABLE (var) = 1;
+ else if (TREE_CODE (var) == MEM_REF
+ && TREE_CODE (TREE_OPERAND (var, 0)) == ADDR_EXPR
+ && DECL_P (TREE_OPERAND (TREE_OPERAND (var, 0), 0)))
+ TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (var, 0), 0)) = 1;
+ }
}
-/* A subroutine of get_expr_operands to handle INDIRECT_REF,
- ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.
+/* A subroutine of get_expr_operands to handle MEM_REF.
- STMT is the statement being processed, EXPR is the INDIRECT_REF
+ STMT is the statement being processed, EXPR is the MEM_REF
that got us here.
FLAGS is as in get_expr_operands.
{
tree *pptr = &TREE_OPERAND (expr, 0);
- if (TREE_THIS_VOLATILE (expr))
+ if (!(flags & opf_no_vops)
+ && TREE_THIS_VOLATILE (expr))
gimple_set_has_volatile_ops (stmt, true);
/* Add the VOP. */
/* If requested, add a USE operand for the base pointer. */
if (recurse_on_base)
get_expr_operands (stmt, pptr,
- opf_use | (flags & opf_no_vops));
+ opf_non_addressable | opf_use
+ | (flags & (opf_no_vops|opf_not_non_addressable)));
}
static void
get_tmr_operands (gimple stmt, tree expr, int flags)
{
- if (TREE_THIS_VOLATILE (expr))
+ if (!(flags & opf_no_vops)
+ && TREE_THIS_VOLATILE (expr))
gimple_set_has_volatile_ops (stmt, true);
/* First record the real operands. */
get_expr_operands (stmt, &TMR_BASE (expr), opf_use | (flags & opf_no_vops));
get_expr_operands (stmt, &TMR_INDEX (expr), opf_use | (flags & opf_no_vops));
-
- if (TMR_SYMBOL (expr))
- mark_address_taken (TMR_SYMBOL (expr));
+ get_expr_operands (stmt, &TMR_INDEX2 (expr), opf_use | (flags & opf_no_vops));
add_virtual_operand (stmt, flags);
}
/* Memory operands are addressable. Note that STMT needs the
address of this operand. */
if (!allows_reg && allows_mem)
- {
- tree t = get_base_address (TREE_VALUE (link));
- if (t && DECL_P (t))
- mark_address_taken (t);
- }
+ mark_address_taken (TREE_VALUE (link));
- get_expr_operands (stmt, &TREE_VALUE (link), opf_def);
+ get_expr_operands (stmt, &TREE_VALUE (link), opf_def | opf_not_non_addressable);
}
/* Gather all input operands. */
/* Memory operands are addressable. Note that STMT needs the
address of this operand. */
if (!allows_reg && allows_mem)
- {
- tree t = get_base_address (TREE_VALUE (link));
- if (t && DECL_P (t))
- mark_address_taken (t);
- }
+ mark_address_taken (TREE_VALUE (link));
- get_expr_operands (stmt, &TREE_VALUE (link), 0);
+ get_expr_operands (stmt, &TREE_VALUE (link), opf_not_non_addressable);
}
/* Clobber all memory and addressable symbols for asm ("" : : : "memory"); */
- for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
- {
- tree link = gimple_asm_clobber_op (stmt, i);
- if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
- {
- add_virtual_operand (stmt, opf_def);
- break;
- }
- }
+ if (gimple_asm_clobbers_memory_p (stmt))
+ add_virtual_operand (stmt, opf_def);
}
reference to it, but the fact that the statement takes its
address will be of interest to some passes (e.g. alias
resolution). */
- if (!is_gimple_debug (stmt))
+ if ((!(flags & opf_non_addressable)
+ || (flags & opf_not_non_addressable))
+ && !is_gimple_debug (stmt))
mark_address_taken (TREE_OPERAND (expr, 0));
/* If the address is invariant, there may be no interesting
here are ARRAY_REF indices which will always be real operands
(GIMPLE does not allow non-registers as array indices). */
flags |= opf_no_vops;
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
+ flags | opf_not_non_addressable);
return;
case SSA_NAME:
gcc_assert (gimple_debug_bind_p (stmt));
return;
- case MISALIGNED_INDIRECT_REF:
- get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
- /* fall through */
-
- case ALIGN_INDIRECT_REF:
- case INDIRECT_REF:
+ case MEM_REF:
get_indirect_ref_operands (stmt, expr, flags, true);
return;
case REALPART_EXPR:
case IMAGPART_EXPR:
{
- if (TREE_THIS_VOLATILE (expr))
+ if (!(flags & opf_no_vops)
+ && TREE_THIS_VOLATILE (expr))
gimple_set_has_volatile_ops (stmt, true);
get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
if (code == COMPONENT_REF)
{
- if (TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
+ if (!(flags & opf_no_vops)
+ && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
gimple_set_has_volatile_ops (stmt, true);
get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
}
case COND_EXPR:
case VEC_COND_EXPR:
+ case VEC_PERM_EXPR:
get_expr_operands (stmt, &TREE_OPERAND (expr, 0), uflags);
get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
constructor_elt *ce;
unsigned HOST_WIDE_INT idx;
+/* A volatile constructor is actually TREE_CLOBBER_P; transfer
+ the volatility to the statement. Test TREE_THIS_VOLATILE rather
+ than TREE_CLOBBER_P here, mirroring the other uses of
+ THIS_VOLATILE in this file. */
+ if (!(flags & opf_no_vops)
+ && TREE_THIS_VOLATILE (expr))
+ gimple_set_has_volatile_ops (stmt, true);
+
for (idx = 0;
VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
idx++)
}
case BIT_FIELD_REF:
- if (TREE_THIS_VOLATILE (expr))
+ if (!(flags & opf_no_vops)
+ && TREE_THIS_VOLATILE (expr))
gimple_set_has_volatile_ops (stmt, true);
/* FALLTHRU */
- case TRUTH_NOT_EXPR:
case VIEW_CONVERT_EXPR:
do_unary:
get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
return;
- case TRUTH_AND_EXPR:
- case TRUTH_OR_EXPR:
- case TRUTH_XOR_EXPR:
case COMPOUND_EXPR:
case OBJ_TYPE_REF:
case ASSERT_EXPR:
case DOT_PROD_EXPR:
case REALIGN_LOAD_EXPR:
+ case WIDEN_MULT_PLUS_EXPR:
+ case WIDEN_MULT_MINUS_EXPR:
+ case FMA_EXPR:
{
get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
- return;
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
+ return;
}
case FUNCTION_DECL:
/* Add call-clobbered operands, if needed. */
if (code == GIMPLE_CALL)
maybe_add_call_vops (stmt);
+
+ if (code == GIMPLE_RETURN)
+ append_vuse (gimple_vop (cfun));
}
}
finalize_ssa_stmt_operands (stmt);
}
+/* Verify the SSA operands of STMT against a freshly computed set.
+ Rebuilds the operands without finalizing them, then compares the
+ result with what is recorded on STMT. Emits an error and returns
+ true on the first mismatch found; returns false if everything is
+ up-to-date. */
+
+DEBUG_FUNCTION bool
+verify_ssa_operands (gimple stmt)
+{
+ use_operand_p use_p;
+ def_operand_p def_p;
+ ssa_op_iter iter;
+ unsigned i;
+ tree use, def;
+ bool volatile_p = gimple_has_volatile_ops (stmt);
+
+ /* build_ssa_operands w/o finalizing them. */
+ gimple_set_has_volatile_ops (stmt, false);
+ start_ssa_stmt_operands ();
+ parse_ssa_operands (stmt);
+
+ /* NOTE(review): the early-return error paths below leave the build
+ arrays populated and STMT's volatile flag cleared --
+ cleanup_build_arrays () is only reached on success. Presumably
+ acceptable for a debug-only checker; confirm. */
+
+ /* Now verify the built operands are the same as present in STMT. */
+ def = gimple_vdef (stmt);
+ if (def
+ && TREE_CODE (def) == SSA_NAME)
+ def = SSA_NAME_VAR (def);
+ if (build_vdef != def)
+ {
+ error ("virtual definition of statement not up-to-date");
+ return true;
+ }
+ if (gimple_vdef (stmt)
+ && ((def_p = gimple_vdef_op (stmt)) == NULL_DEF_OPERAND_P
+ || DEF_FROM_PTR (def_p) != gimple_vdef (stmt)))
+ {
+ error ("virtual def operand missing for stmt");
+ return true;
+ }
+
+ use = gimple_vuse (stmt);
+ if (use
+ && TREE_CODE (use) == SSA_NAME)
+ use = SSA_NAME_VAR (use);
+ if (build_vuse != use)
+ {
+ error ("virtual use of statement not up-to-date");
+ return true;
+ }
+ if (gimple_vuse (stmt)
+ && ((use_p = gimple_vuse_op (stmt)) == NULL_USE_OPERAND_P
+ || USE_FROM_PTR (use_p) != gimple_vuse (stmt)))
+ {
+ error ("virtual use operand missing for stmt");
+ return true;
+ }
+
+ /* Each real USE operand on STMT must match, by use-pointer address,
+ exactly one entry of the freshly built list; matched entries are
+ consumed by replacing them with NULL_TREE. */
+ FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
+ {
+ FOR_EACH_VEC_ELT (tree, build_uses, i, use)
+ {
+ if (use_p->use == (tree *)use)
+ {
+ VEC_replace (tree, build_uses, i, NULL_TREE);
+ break;
+ }
+ }
+ if (i == VEC_length (tree, build_uses))
+ {
+ error ("excess use operand for stmt");
+ debug_generic_expr (USE_FROM_PTR (use_p));
+ return true;
+ }
+ }
+ /* Any unconsumed entry was just computed but is absent from STMT. */
+ FOR_EACH_VEC_ELT (tree, build_uses, i, use)
+ if (use != NULL_TREE)
+ {
+ error ("use operand missing for stmt");
+ debug_generic_expr (*(tree *)use);
+ return true;
+ }
+
+ /* Same matching scheme for the real DEF operands. */
+ FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, iter, SSA_OP_DEF)
+ {
+ FOR_EACH_VEC_ELT (tree, build_defs, i, def)
+ {
+ if (def_p == (tree *)def)
+ {
+ VEC_replace (tree, build_defs, i, NULL_TREE);
+ break;
+ }
+ }
+ if (i == VEC_length (tree, build_defs))
+ {
+ error ("excess def operand for stmt");
+ debug_generic_expr (DEF_FROM_PTR (def_p));
+ return true;
+ }
+ }
+ FOR_EACH_VEC_ELT (tree, build_defs, i, def)
+ if (def != NULL_TREE)
+ {
+ error ("def operand missing for stmt");
+ debug_generic_expr (*(tree *)def);
+ return true;
+ }
+
+ /* Re-parsing recomputed the volatile flag from scratch; it must
+ agree with what STMT carried on entry. */
+ if (gimple_has_volatile_ops (stmt) != volatile_p)
+ {
+ error ("stmt volatile flag not up-to-date");
+ return true;
+ }
+
+ cleanup_build_arrays ();
+ return false;
+}
+
/* Releases the operands of STMT back to their freelists, and clears
the stmt operand lists. */
timevar_push (TV_TREE_OPS);
+ /* If the stmt is a noreturn call queue it to be processed by
+ split_bbs_on_noreturn_calls during cfg cleanup. */
+ if (is_gimple_call (stmt)
+ && gimple_call_noreturn_p (stmt))
+ VEC_safe_push (gimple, gc, MODIFIED_NORETURN_CALLS (cfun), stmt);
+
gcc_assert (gimple_modified_p (stmt));
build_ssa_operands (stmt);
gimple_set_modified (stmt, false);
/* If the operand cache is active, attempt to preserve the relative
positions of these two operands in their respective immediate use
- lists. */
+ lists by adjusting their use pointer to point to the new
+ operand position. */
if (ssa_operands_active () && op0 != op1)
{
use_optype_p use0, use1, ptr;
break;
}
- /* If both uses don't have operand entries, there isn't much we can do
- at this point. Presumably we don't need to worry about it. */
- if (use0 && use1)
- {
- tree *tmp = USE_OP_PTR (use1)->use;
- USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
- USE_OP_PTR (use0)->use = tmp;
- }
+ /* And adjust their location to point to the new position of the
+ operand. */
+ if (use0)
+ USE_OP_PTR (use0)->use = exp1;
+ if (use1)
+ USE_OP_PTR (use1)->use = exp0;
}
/* Now swap the data. */
/* Scan the immediate_use list for VAR making sure its linked properly.
Return TRUE if there is a problem and emit an error message to F. */
-bool
+DEBUG_FUNCTION bool
verify_imm_links (FILE *f, tree var)
{
use_operand_p ptr, prev, list;
/* Dump def-use edges on stderr. */
-void
+DEBUG_FUNCTION void
debug_immediate_uses (void)
{
dump_immediate_uses (stderr);
/* Dump def-use edges on stderr. */
-void
+DEBUG_FUNCTION void
debug_immediate_uses_for (tree var)
{
dump_immediate_uses_for (stderr, var);