/* SSA operands management for trees.
- Copyright (C) 2003 Free Software Foundation, Inc.
+ Copyright (C) 2003, 2004 Free Software Foundation, Inc.
This file is part of GCC.
#include "flags.h"
#include "function.h"
#include "diagnostic.h"
+#include "errors.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
+#include "langhooks.h"
-/* This file contains the code required to mnage the operands cache of the
+/* This file contains the code required to manage the operands cache of the
SSA optimizer. For every stmt, we maintain an operand cache in the stmt
- annotation. This cache contains operands that will be of interets to
+ annotation. This cache contains operands that will be of interest to
optimizers and other passes wishing to manipulate the IL.
 The operand types are broken up into REAL and VIRTUAL operands. The real
 get_stmt_operands() is the primary entry point.
 The operand tree is then parsed by the various get_* routines which look
- through the stmt tree for the occurence of operands which may be of
+ through the stmt tree for the occurrence of operands which may be of
interest, and calls are made to the append_* routines whenever one is
found. There are 5 of these routines, each representing one of the
5 types of operands. Defs, Uses, Virtual Uses, Virtual May Defs, and
variable, and that same variable occurs in the same operands cache, then
the new cache vector will also get the same SSA_NAME.
- ie, if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
+ i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
vector for VUSE, then the new vector will also be modified such that
it contains 'a_5' rather than 'a'.
v_may_def_optype v_may_def_ops;
unsigned size;
size = sizeof (struct v_may_def_optype_d)
- + sizeof (v_may_def_operand_type_t) * (num - 1);
+ + sizeof (v_def_use_operand_type_t) * (num - 1);
v_may_def_ops = ggc_alloc (size);
v_may_def_ops->num_v_may_defs = num;
return v_may_def_ops;
{
v_must_def_optype v_must_def_ops;
unsigned size;
- size = sizeof (struct v_must_def_optype_d) + sizeof (tree) * (num - 1);
+ size = sizeof (struct v_must_def_optype_d) + sizeof (v_def_use_operand_type_t) * (num - 1);
v_must_def_ops = ggc_alloc (size);
v_must_def_ops->num_v_must_defs = num;
return v_must_def_ops;
void
fini_ssa_operands (void)
{
+ ggc_free (build_defs);
+ ggc_free (build_uses);
+ ggc_free (build_v_may_defs);
+ ggc_free (build_vuses);
+ ggc_free (build_v_must_defs);
+ build_defs = NULL;
+ build_uses = NULL;
+ build_v_may_defs = NULL;
+ build_vuses = NULL;
+ build_v_must_defs = NULL;
}
if (num == 0)
return NULL;
-#ifdef ENABLE_CHECKING
/* There should only be a single real definition per assignment. */
- if (TREE_CODE (stmt) == MODIFY_EXPR && num > 1)
- abort ();
-#endif
+ gcc_assert (TREE_CODE (stmt) != MODIFY_EXPR || num <= 1);
old_ops = *old_ops_p;
initial call to get_stmt_operands does not pass a pointer to a
statement). */
for (x = 0; x < num; x++)
- if (*(VARRAY_TREE_PTR (build_uses, x)) == stmt)
- abort ();
+ gcc_assert (*(VARRAY_TREE_PTR (build_uses, x)) != stmt);
}
#endif
old_ops = *old_ops_p;
return vuse_ops;
}
-
/* Return a new v_must_def operand vector for STMT, comparing to OLD_OPS_P. */
static v_must_def_optype
if (num == 0)
return NULL;
-#ifdef ENABLE_CHECKING
/* There should only be a single V_MUST_DEF per assignment. */
- if (TREE_CODE (stmt) == MODIFY_EXPR && num > 1)
- abort ();
-#endif
+ gcc_assert (TREE_CODE (stmt) != MODIFY_EXPR || num <= 1);
old_ops = *old_ops_p;
build_diff = false;
for (x = 0; x < num; x++)
{
- tree var = old_ops->v_must_defs[x];
+ tree var = old_ops->v_must_defs[x].def;
if (TREE_CODE (var) == SSA_NAME)
var = SSA_NAME_VAR (var);
if (var != VARRAY_TREE (build_v_must_defs, x))
/* Look for VAR in the original vector. */
for (i = 0; i < old_num; i++)
{
- result = old_ops->v_must_defs[i];
+ result = old_ops->v_must_defs[i].def;
if (TREE_CODE (result) == SSA_NAME)
result = SSA_NAME_VAR (result);
if (result == var)
{
- v_must_def_ops->v_must_defs[x] = old_ops->v_must_defs[i];
+ v_must_def_ops->v_must_defs[x].def = old_ops->v_must_defs[i].def;
+ v_must_def_ops->v_must_defs[x].use = old_ops->v_must_defs[i].use;
break;
}
}
if (i == old_num)
- v_must_def_ops->v_must_defs[x] = var;
+ {
+ v_must_def_ops->v_must_defs[x].def = var;
+ v_must_def_ops->v_must_defs[x].use = var;
+ }
}
}
VARRAY_POP_ALL (build_v_must_defs);
static inline void
start_ssa_stmt_operands (void)
{
-#ifdef ENABLE_CHECKING
- if (VARRAY_ACTIVE_SIZE (build_defs) > 0
- || VARRAY_ACTIVE_SIZE (build_uses) > 0
- || VARRAY_ACTIVE_SIZE (build_vuses) > 0
- || VARRAY_ACTIVE_SIZE (build_v_may_defs) > 0
- || VARRAY_ACTIVE_SIZE (build_v_must_defs) > 0)
- abort ();
-#endif
+ gcc_assert (VARRAY_ACTIVE_SIZE (build_defs) == 0);
+ gcc_assert (VARRAY_ACTIVE_SIZE (build_uses) == 0);
+ gcc_assert (VARRAY_ACTIVE_SIZE (build_vuses) == 0);
+ gcc_assert (VARRAY_ACTIVE_SIZE (build_v_may_defs) == 0);
+ gcc_assert (VARRAY_ACTIVE_SIZE (build_v_must_defs) == 0);
}
will be destroyed. It is appropriate to call free_stmt_operands() on
the value returned in old_ops.
- The rationale for this: Certain optimizations wish to exmaine the difference
+ The rationale for this: Certain optimizations wish to examine the difference
between new_ops and old_ops after processing. If a set of operands don't
change, new_ops will simply assume the pointer in old_ops, and the old_ops
pointer will be set to NULL, indicating no memory needs to be cleared.
stmt_ann_t ann;
stmt_operands_t old_operands;
-#if defined ENABLE_CHECKING
/* The optimizers cannot handle statements that are nothing but a
_DECL. This indicates a bug in the gimplifier. */
- if (SSA_VAR_P (stmt))
- abort ();
-#endif
+ gcc_assert (!SSA_VAR_P (stmt));
/* Ignore error statements. */
if (TREE_CODE (stmt) == ERROR_MARK)
get_expr_operands (tree stmt, tree *expr_p, int flags)
{
enum tree_code code;
- char class;
+ enum tree_code_class class;
tree expr = *expr_p;
if (expr == NULL || expr == error_mark_node)
case VAR_DECL:
case PARM_DECL:
case RESULT_DECL:
+ case CONST_DECL:
/* If we found a variable, add it to DEFS or USES depending
on the operand flags. */
add_stmt_operand (expr_p, stmt, flags);
return;
+ case MISALIGNED_INDIRECT_REF:
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
+ /* fall through */
+
+ case ALIGN_INDIRECT_REF:
case INDIRECT_REF:
get_indirect_ref_operands (stmt, expr, flags);
return;
get_call_expr_operands (stmt, expr);
return;
+ case COND_EXPR:
+ case VEC_COND_EXPR:
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
+ return;
+
case MODIFY_EXPR:
{
int subflags;
return;
}
+ case REALIGN_LOAD_EXPR:
+ {
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
+ return;
+ }
+
case BLOCK:
case FUNCTION_DECL:
case EXC_PTR_EXPR:
return;
default:
- if (class == '1')
+ if (class == tcc_unary)
goto do_unary;
- if (class == '2' || class == '<')
+ if (class == tcc_binary || class == tcc_comparison)
goto do_binary;
- if (class == 'c' || class == 't')
+ if (class == tcc_constant || class == tcc_type)
return;
}
/* If we get here, something has gone wrong. */
+#ifdef ENABLE_CHECKING
fprintf (stderr, "unhandled expression in get_expr_operands():\n");
debug_tree (expr);
fputs ("\n", stderr);
- abort ();
+ internal_error ("internal error");
+#endif
+ gcc_unreachable ();
}
-/* Scan operands in the ASM_EXPR stmt refered to in INFO. */
+/* Scan operands in the ASM_EXPR stmt referred to in INFO. */
static void
get_asm_expr_operands (tree stmt)
parse_output_constraint (&constraint, i, 0, 0,
&allows_mem, &allows_reg, &is_inout);
-#if defined ENABLE_CHECKING
/* This should have been split in gimplify_asm_expr. */
- if (allows_reg && is_inout)
- abort ();
-#endif
+ gcc_assert (!allows_reg || !is_inout);
/* Memory operands are addressable. Note that STMT needs the
address of this operand. */
if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
{
size_t i;
+ bitmap_iterator bi;
/* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
decided to group them). */
if (global_var)
add_stmt_operand (&global_var, stmt, opf_is_def);
else
- EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
+ EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
{
tree var = referenced_var (i);
add_stmt_operand (&var, stmt, opf_is_def);
- });
+ }
/* Now clobber all addressables. */
- EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i,
+ EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
{
tree var = referenced_var (i);
add_stmt_operand (&var, stmt, opf_is_def);
- });
+ }
break;
}
}
-/* A subroutine of get_expr_operands to handle INDIRECT_REF. */
+/* A subroutine of get_expr_operands to handle INDIRECT_REF,
+ ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF. */
static void
get_indirect_ref_operands (tree stmt, tree expr, int flags)
/* Stores into INDIRECT_REF operands are never killing definitions. */
flags &= ~opf_kill_def;
+ if (REF_ORIGINAL (expr))
+ {
+ enum tree_code ocode = TREE_CODE (REF_ORIGINAL (expr));
+
+ /* If we originally accessed part of a structure, we do it still. */
+ if (ocode == ARRAY_REF
+ || ocode == COMPONENT_REF
+ || ocode == REALPART_EXPR
+ || ocode == IMAGPART_EXPR)
+ flags &= ~opf_kill_def;
+ }
+
if (SSA_VAR_P (ptr))
{
struct ptr_info_def *pi = NULL;
/* Ok, this isn't even is_gimple_min_invariant. Something's broke. */
else
- abort ();
+ gcc_unreachable ();
/* Add a USE operand for the base pointer. */
get_expr_operands (stmt, pptr, opf_none);
/* The variable is not aliased or it is an alias tag. */
if (flags & opf_is_def)
{
- if (v_ann->is_alias_tag)
- {
- /* Alias tagged vars get V_MAY_DEF to avoid breaking
- def-def chains with the other variables in their
- alias sets. */
- if (s_ann)
- s_ann->makes_aliased_stores = 1;
- append_v_may_def (var);
- }
- else if (flags & opf_kill_def)
+ if (flags & opf_kill_def)
{
-#if defined ENABLE_CHECKING
/* Only regular variables may get a V_MUST_DEF
operand. */
- if (v_ann->mem_tag_kind != NOT_A_TAG)
- abort ();
-#endif
+ gcc_assert (v_ann->mem_tag_kind == NOT_A_TAG);
/* V_MUST_DEF for non-aliased, non-GIMPLE register
variable definitions. */
append_v_must_def (var);
/* The variable is aliased. Add its aliases to the virtual
operands. */
-#if defined ENABLE_CHECKING
- if (VARRAY_ACTIVE_SIZE (aliases) == 0)
- abort ();
-#endif
+ gcc_assert (VARRAY_ACTIVE_SIZE (aliases) != 0);
if (flags & opf_is_def)
{
else
{
size_t i;
+ bitmap_iterator bi;
- EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
+ EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
{
tree var = referenced_var (i);
-
- /* If VAR is read-only, don't add a V_MAY_DEF, just a
- VUSE operand. */
- if (!TREE_READONLY (var))
- add_stmt_operand (&var, stmt, opf_is_def);
- else
+ if (TREE_READONLY (var)
+ && (TREE_STATIC (var) || DECL_EXTERNAL (var)))
add_stmt_operand (&var, stmt, opf_none);
- });
+ else
+ add_stmt_operand (&var, stmt, opf_is_def);
+ }
}
}
static void
add_call_read_ops (tree stmt)
{
+ bitmap_iterator bi;
+
/* Otherwise, if the function is not pure, it may reference memory. Add
a VUSE for .GLOBAL_VAR if it has been created. Otherwise, add a VUSE
for each call-clobbered variable. See add_referenced_var for the
else
{
size_t i;
-
- EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
+
+ EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
{
tree var = referenced_var (i);
add_stmt_operand (&var, stmt, opf_none);
- });
+ }
}
}
{
*v_must_defs_new = allocate_v_must_def_optype (NUM_V_MUST_DEFS (v_must_defs));
for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
- SET_V_MUST_DEF_OP (*v_must_defs_new, i, V_MUST_DEF_OP (v_must_defs, i));
+ {
+ SET_V_MUST_DEF_RESULT (*v_must_defs_new, i, V_MUST_DEF_RESULT (v_must_defs, i));
+ SET_V_MUST_DEF_KILL (*v_must_defs_new, i, V_MUST_DEF_KILL (v_must_defs, i));
+ }
}
}
/* Specifically for use in DOM's expression analysis. Given a store, we
- create an artifical stmt which looks like a load from the store, this can
+ create an artificial stmt which looks like a load from the store, this can
be used to eliminate redundant loads. OLD_OPS are the operands from the
- store stmt, and NEW_STMT is the new load which reperesent a load of the
+ store stmt, and NEW_STMT is the new load which represents a load of the
values stored. */
void
free_vuses (&(ann->operands.vuse_ops));
free_v_may_defs (&(ann->operands.v_may_def_ops));
free_v_must_defs (&(ann->operands.v_must_def_ops));
-
+
/* For each VDEF on the original statement, we want to create a
VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
statement. */
for (j = 0; j < NUM_V_MUST_DEFS (old_ops->v_must_def_ops); j++)
{
- op = V_MUST_DEF_OP (old_ops->v_must_def_ops, j);
+ op = V_MUST_DEF_RESULT (old_ops->v_must_def_ops, j);
append_vuse (op);
}