diff --git a/gcc/tree-ssa-operands.c b/gcc/tree-ssa-operands.c
index 70b4ea10972..7d588f8eb96 100644
--- a/gcc/tree-ssa-operands.c
+++ b/gcc/tree-ssa-operands.c
@@ -1,11 +1,12 @@
/* SSA operands management for trees.
- Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
+ Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008
+ Free Software Foundation, Inc.
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
-the Free Software Foundation; either version 2, or (at your option)
+the Free Software Foundation; either version 3, or (at your option)
any later version.
GCC is distributed in the hope that it will be useful,
@@ -14,9 +15,8 @@ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING. If not, write to
-the Free Software Foundation, 51 Franklin Street, Fifth Floor,
-Boston, MA 02110-1301, USA. */
+along with GCC; see the file COPYING3. If not see
+<http://www.gnu.org/licenses/>. */
#include "config.h"
#include "system.h"
@@ -53,16 +53,15 @@ Boston, MA 02110-1301, USA. */
The operand tree is then parsed by the various get_* routines which look
through the stmt tree for the occurrence of operands which may be of
interest, and calls are made to the append_* routines whenever one is
- found. There are 5 of these routines, each representing one of the
- 5 types of operands. Defs, Uses, Virtual Uses, Virtual May Defs, and
- Virtual Must Defs.
+ found. There are 4 of these routines, each representing one of the
+ 4 types of operands: Defs, Uses, Virtual Uses, and Virtual May Defs.
The append_* routines check for duplication, and simply keep a list of
unique objects for each operand type in the build_* extendable vectors.
Once the stmt tree is completely parsed, the finalize_ssa_operands()
routine is called, which proceeds to perform the finalization routine
- on each of the 5 operand vectors which have been built up.
+ on each of the 4 operand vectors which have been built up.
If the stmt had a previous operand cache, the finalization routines
attempt to match up the new operands with the old ones. If it's a perfect
@@ -72,24 +71,47 @@ Boston, MA 02110-1301, USA. */
variable, and that same variable occurs in the same operands cache, then
the new cache vector will also get the same SSA_NAME.
- i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
- vector for VUSE, then the new vector will also be modified such that
- it contains 'a_5' rather than 'a'.
+ i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
+ operand vector for VUSE, then the new vector will also be modified
+ such that it contains 'a_5' rather than 'a'. */
-*/
+/* Structure storing statistics on how many call clobbers we have, and
+ how many were avoided. */
+
+static struct
+{
+ /* Number of call-clobbered ops we attempt to add to calls in
+ add_call_clobbered_mem_symbols. */
+ unsigned int clobbered_vars;
+
+ /* Number of write-clobbers (VDEFs) avoided by using
+ not_written information. */
+ unsigned int static_write_clobbers_avoided;
+
+ /* Number of reads (VUSEs) avoided by using not_read information. */
+ unsigned int static_read_clobbers_avoided;
+
+ /* Number of write-clobbers avoided because the variable can't escape to
+ this call. */
+ unsigned int unescapable_clobbers_avoided;
+
+ /* Number of read-only uses we attempt to add to calls in
+ add_call_read_mem_symbols. */
+ unsigned int readonly_clobbers;
+
+ /* Number of read-only uses we avoid using not_read information. */
+ unsigned int static_readonly_clobbers_avoided;
+} clobber_stats;
/* Flags to describe operand properties in helpers. */
/* By default, operands are loaded. */
-#define opf_none 0
+#define opf_use 0
/* Operand is the target of an assignment expression or a
- call-clobbered variable */
-#define opf_is_def (1 << 0)
-
-/* Operand is the target of an assignment expression. */
-#define opf_kill_def (1 << 1)
+ call-clobbered variable. */
+#define opf_def (1 << 0)
/* No virtual operands should be created in the expression. This is used
when traversing ADDR_EXPR nodes which have different semantics than
@@ -97,12 +119,12 @@ Boston, MA 02110-1301, USA. */
other expressions. Inside an ADDR_EXPR node, the only operands that we
need to consider are indices into arrays. For instance, &a.b[i] should
generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
VUSE for 'b'. */
-#define opf_no_vops (1 << 2)
-
-/* Operand is a "non-specific" kill for call-clobbers and such. This is used
- to distinguish "reset the world" events from explicit MODIFY_EXPRs. */
-#define opf_non_specific (1 << 3)
+#define opf_no_vops (1 << 1)
+/* Operand is an implicit reference. This is used to distinguish
+ explicit assignments in the form of MODIFY_EXPR from
+ clobbering sites like function calls or ASM_EXPRs. */
+#define opf_implicit (1 << 2)
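/* For example, the LHS of an assignment is scanned with opf_def,
   while get_expr_operands below ors opf_no_vops into the active
   flags when it recurses into an ADDR_EXPR:

	flags |= opf_no_vops;
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);  */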
/* Array for building all the def operands. */
static VEC(tree,heap) *build_defs;
@@ -110,51 +132,31 @@ static VEC(tree,heap) *build_defs;
/* Array for building all the use operands. */
static VEC(tree,heap) *build_uses;
-/* Array for building all the v_may_def operands. */
-static VEC(tree,heap) *build_v_may_defs;
-
-/* Array for building all the vuse operands. */
-static VEC(tree,heap) *build_vuses;
-
-/* Array for building all the v_must_def operands. */
-static VEC(tree,heap) *build_v_must_defs;
-
-
-/* These arrays are the cached operand vectors for call clobbered calls. */
-static bool ops_active = false;
-
-static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
-static unsigned operand_memory_index;
-
-static void get_expr_operands (tree, tree *, int);
-static void get_asm_expr_operands (tree);
-static void get_indirect_ref_operands (tree, tree, int, tree, HOST_WIDE_INT,
- HOST_WIDE_INT, bool);
-static void get_tmr_operands (tree, tree, int);
-static void get_call_expr_operands (tree, tree);
-static inline void append_def (tree *);
-static inline void append_use (tree *);
-static void append_v_may_def (tree);
-static void append_v_must_def (tree);
-static void add_call_clobber_ops (tree, tree);
-static void add_call_read_ops (tree, tree);
-static void add_stmt_operand (tree *, stmt_ann_t, int);
-static void add_virtual_operand (tree, stmt_ann_t, int, tree,
- HOST_WIDE_INT, HOST_WIDE_INT,
- bool);
-static void build_ssa_operands (tree stmt);
-
-static def_optype_p free_defs = NULL;
-static use_optype_p free_uses = NULL;
-static vuse_optype_p free_vuses = NULL;
-static maydef_optype_p free_maydefs = NULL;
-static mustdef_optype_p free_mustdefs = NULL;
+/* The built VDEF operand. */
+static tree build_vdef;
+
+/* The built VUSE operand. */
+static tree build_vuse;
+/* Bitmap obstack for our data structures that need to survive across
+ compilations of multiple functions. */
+static bitmap_obstack operands_bitmap_obstack;
+
+static void get_expr_operands (gimple, tree *, int);
+
+/* Number of functions with initialized ssa_operands. */
+static int n_initialized = 0;
+
+/* Stack of statements to change. Every call to
+ push_stmt_changes pushes the stmt onto the stack. Calls to
+ pop_stmt_changes pop a stmt off of the stack and compute the set
+ of changes for the popped statement. */
+static VEC(gimple_p,heap) *scb_stack;
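/* A sketch of the intended pairing in a pass (assuming the usual
   calling convention of these helpers):

	push_stmt_changes (&stmt);
	... rewrite the operands of *stmt ...
	pop_stmt_changes (&stmt);  */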
/* Return the DECL_UID of the base variable of T. */
static inline unsigned
-get_name_decl (tree t)
+get_name_decl (const_tree t)
{
if (TREE_CODE (t) != SSA_NAME)
return DECL_UID (t);
@@ -162,106 +164,83 @@ get_name_decl (tree t)
return DECL_UID (SSA_NAME_VAR (t));
}
-/* Comparison function for qsort used in operand_build_sort_virtual. */
-
-static int
-operand_build_cmp (const void *p, const void *q)
-{
- tree e1 = *((const tree *)p);
- tree e2 = *((const tree *)q);
- unsigned int u1,u2;
-
- u1 = get_name_decl (e1);
- u2 = get_name_decl (e2);
-
- /* We want to sort in ascending order. They can never be equal. */
-#ifdef ENABLE_CHECKING
- gcc_assert (u1 != u2);
-#endif
- return (u1 > u2 ? 1 : -1);
-}
-
-/* Sort the virtual operands in LIST from lowest DECL_UID to highest. */
-
-static inline void
-operand_build_sort_virtual (VEC(tree,heap) *list)
-{
- int num = VEC_length (tree, list);
- if (num < 2)
- return;
- if (num == 2)
- {
- if (get_name_decl (VEC_index (tree, list, 0))
- > get_name_decl (VEC_index (tree, list, 1)))
- {
- /* Swap elements if in the wrong order. */
- tree tmp = VEC_index (tree, list, 0);
- VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
- VEC_replace (tree, list, 1, tmp);
- }
- return;
- }
- /* There are 3 or more elements, call qsort. */
- qsort (VEC_address (tree, list),
- VEC_length (tree, list),
- sizeof (tree),
- operand_build_cmp);
-}
-
-
-/* Return true if the ssa operands cache is active. */
+/* Return true if the SSA operands cache is active. */
bool
ssa_operands_active (void)
{
- return ops_active;
+ /* This function may be invoked from contexts where CFUN is NULL
+ (IPA passes), return false for now. FIXME: operands may be
+ active in each individual function, maybe this function should
+ take CFUN as a parameter. */
+ if (cfun == NULL)
+ return false;
+
+ return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
}
-/* Structure storing statistics on how many call clobbers we have, and
- how many where avoided. */
-static struct
-{
- /* Number of call-clobbered ops we attempt to add to calls in
- add_call_clobber_ops. */
- unsigned int clobbered_vars;
+
+/* Create the VOP variable, an artificial global variable to act as a
+ representative of all of the virtual operands' FUD chain. */
- /* Number of write-clobbers (v_may_defs) avoided by using
- not_written information. */
- unsigned int static_write_clobbers_avoided;
+static void
+create_vop_var (void)
+{
+ tree global_var;
+
+ gcc_assert (cfun->gimple_df->vop == NULL_TREE);
+
+ global_var = build_decl (VAR_DECL, get_identifier (".MEM"),
+ void_type_node);
+ DECL_ARTIFICIAL (global_var) = 1;
+ TREE_READONLY (global_var) = 0;
+ DECL_EXTERNAL (global_var) = 1;
+ TREE_STATIC (global_var) = 1;
+ TREE_USED (global_var) = 1;
+ DECL_CONTEXT (global_var) = NULL_TREE;
+ TREE_THIS_VOLATILE (global_var) = 0;
+ TREE_ADDRESSABLE (global_var) = 0;
+
+ create_var_ann (global_var);
+ add_referenced_var (global_var);
+ cfun->gimple_df->vop = global_var;
+}
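/* For example, once the function is renamed into SSA form, a store
   through a pointer is threaded through this single variable in the
   dumps:

	# .MEM_3 = VDEF <.MEM_2>
	*p_1 = x_2;  */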
- /* Number of reads (vuses) avoided by using not_read
- information. */
- unsigned int static_read_clobbers_avoided;
+/* These are the sizes, in bytes, of the operand memory buffer that gets
+ allocated each time more operand space is required. The final value is
+ the amount that is allocated every time after that.
+ In 1k we can fit 25 use operands (or 63 def operands) on a host with
+ 8 byte pointers, that would be 10 statements each with 1 def and 2
+ uses. */
- /* Number of write-clobbers avoided because the variable can't escape to
- this call. */
- unsigned int unescapable_clobbers_avoided;
-
- /* Number of readonly uses we attempt to add to calls in
- add_call_read_ops. */
- unsigned int readonly_clobbers;
+#define OP_SIZE_INIT 0
+#define OP_SIZE_1 (1024 - sizeof (void *))
+#define OP_SIZE_2 (1024 * 4 - sizeof (void *))
+#define OP_SIZE_3 (1024 * 16 - sizeof (void *))
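/* Assuming 8-byte pointers, a def_optype_d is two pointers (16 bytes)
   and a use_optype_d is five (40 bytes), which is where the figures
   above come from: (1024 - 8) / 16 = 63 defs, and (1024 - 8) / 40 = 25
   uses fit in an OP_SIZE_1 buffer.  */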
- /* Number of readonly uses we avoid using not_read information. */
- unsigned int static_readonly_clobbers_avoided;
-} clobber_stats;
-
/* Initialize the operand cache routines. */
void
init_ssa_operands (void)
{
- build_defs = VEC_alloc (tree, heap, 5);
- build_uses = VEC_alloc (tree, heap, 10);
- build_vuses = VEC_alloc (tree, heap, 25);
- build_v_may_defs = VEC_alloc (tree, heap, 25);
- build_v_must_defs = VEC_alloc (tree, heap, 25);
-
- gcc_assert (operand_memory == NULL);
- operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
- ops_active = true;
+ if (!n_initialized++)
+ {
+ build_defs = VEC_alloc (tree, heap, 5);
+ build_uses = VEC_alloc (tree, heap, 10);
+ build_vuse = NULL_TREE;
+ build_vdef = NULL_TREE;
+ bitmap_obstack_initialize (&operands_bitmap_obstack);
+ scb_stack = VEC_alloc (gimple_p, heap, 20);
+ }
+
+ gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
+ gimple_ssa_operands (cfun)->operand_memory_index
+ = gimple_ssa_operands (cfun)->ssa_operand_mem_size;
+ gimple_ssa_operands (cfun)->ops_active = true;
memset (&clobber_stats, 0, sizeof (clobber_stats));
-
+ gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_INIT;
+ create_vop_var ();
}
@@ -271,381 +250,326 @@ void
fini_ssa_operands (void)
{
struct ssa_operand_memory_d *ptr;
- VEC_free (tree, heap, build_defs);
- VEC_free (tree, heap, build_uses);
- VEC_free (tree, heap, build_v_must_defs);
- VEC_free (tree, heap, build_v_may_defs);
- VEC_free (tree, heap, build_vuses);
- free_defs = NULL;
- free_uses = NULL;
- free_vuses = NULL;
- free_maydefs = NULL;
- free_mustdefs = NULL;
- while ((ptr = operand_memory) != NULL)
+
+ if (!--n_initialized)
+ {
+ VEC_free (tree, heap, build_defs);
+ VEC_free (tree, heap, build_uses);
+ build_vdef = NULL_TREE;
+ build_vuse = NULL_TREE;
+
+ /* The change buffer stack had better be empty. */
+ gcc_assert (VEC_length (gimple_p, scb_stack) == 0);
+ VEC_free (gimple_p, heap, scb_stack);
+ scb_stack = NULL;
+ }
+
+ gimple_ssa_operands (cfun)->free_defs = NULL;
+ gimple_ssa_operands (cfun)->free_uses = NULL;
+
+ while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
{
- operand_memory = operand_memory->next;
+ gimple_ssa_operands (cfun)->operand_memory
+ = gimple_ssa_operands (cfun)->operand_memory->next;
ggc_free (ptr);
}
- ops_active = false;
-
+ gimple_ssa_operands (cfun)->ops_active = false;
+
+ if (!n_initialized)
+ bitmap_obstack_release (&operands_bitmap_obstack);
+
+ cfun->gimple_df->vop = NULL_TREE;
+
if (dump_file && (dump_flags & TDF_STATS))
{
- fprintf (dump_file, "Original clobbered vars:%d\n", clobber_stats.clobbered_vars);
- fprintf (dump_file, "Static write clobbers avoided:%d\n", clobber_stats.static_write_clobbers_avoided);
- fprintf (dump_file, "Static read clobbers avoided:%d\n", clobber_stats.static_read_clobbers_avoided);
- fprintf (dump_file, "Unescapable clobbers avoided:%d\n", clobber_stats.unescapable_clobbers_avoided);
- fprintf (dump_file, "Original readonly clobbers:%d\n", clobber_stats.readonly_clobbers);
- fprintf (dump_file, "Static readonly clobbers avoided:%d\n", clobber_stats.static_readonly_clobbers_avoided);
+ fprintf (dump_file, "Original clobbered vars: %d\n",
+ clobber_stats.clobbered_vars);
+ fprintf (dump_file, "Static write clobbers avoided: %d\n",
+ clobber_stats.static_write_clobbers_avoided);
+ fprintf (dump_file, "Static read clobbers avoided: %d\n",
+ clobber_stats.static_read_clobbers_avoided);
+ fprintf (dump_file, "Unescapable clobbers avoided: %d\n",
+ clobber_stats.unescapable_clobbers_avoided);
+ fprintf (dump_file, "Original read-only clobbers: %d\n",
+ clobber_stats.readonly_clobbers);
+ fprintf (dump_file, "Static read-only clobbers avoided: %d\n",
+ clobber_stats.static_readonly_clobbers_avoided);
}
}
-/* Return memory for operands of SIZE chunks. */
+/* Return memory for an operand of size SIZE. */
static inline void *
ssa_operand_alloc (unsigned size)
{
char *ptr;
- if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
+
+ gcc_assert (size == sizeof (struct use_optype_d)
+ || size == sizeof (struct def_optype_d));
+
+ if (gimple_ssa_operands (cfun)->operand_memory_index + size
+ >= gimple_ssa_operands (cfun)->ssa_operand_mem_size)
{
struct ssa_operand_memory_d *ptr;
- ptr = GGC_NEW (struct ssa_operand_memory_d);
- ptr->next = operand_memory;
- operand_memory = ptr;
- operand_memory_index = 0;
+
+ switch (gimple_ssa_operands (cfun)->ssa_operand_mem_size)
+ {
+ case OP_SIZE_INIT:
+ gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_1;
+ break;
+ case OP_SIZE_1:
+ gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_2;
+ break;
+ case OP_SIZE_2:
+ case OP_SIZE_3:
+ gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_3;
+ break;
+ default:
+ gcc_unreachable ();
+ }
+
+ ptr = (struct ssa_operand_memory_d *)
+ ggc_alloc (sizeof (void *)
+ + gimple_ssa_operands (cfun)->ssa_operand_mem_size);
+ ptr->next = gimple_ssa_operands (cfun)->operand_memory;
+ gimple_ssa_operands (cfun)->operand_memory = ptr;
+ gimple_ssa_operands (cfun)->operand_memory_index = 0;
}
- ptr = &(operand_memory->mem[operand_memory_index]);
- operand_memory_index += size;
+
+ ptr = &(gimple_ssa_operands (cfun)->operand_memory
+ ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
+ gimple_ssa_operands (cfun)->operand_memory_index += size;
return ptr;
}
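/* The allocation size thus follows the schedule 1k -> 4k -> 16k, and
   every chunk after the third is 16k.  */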
-/* Make sure PTR is in the correct immediate use list. Since uses are simply
- pointers into the stmt TREE, there is no way of telling if anyone has
- changed what this pointer points to via TREE_OPERANDS (exp, 0) = <...>.
- The contents are different, but the pointer is still the same. This
- routine will check to make sure PTR is in the correct list, and if it isn't
- put it in the correct list. We cannot simply check the previous node
- because all nodes in the same stmt might have be changed. */
+/* Allocate a DEF operand. */
-static inline void
-correct_use_link (use_operand_p ptr, tree stmt)
+static inline struct def_optype_d *
+alloc_def (void)
{
- use_operand_p prev;
- tree root;
+ struct def_optype_d *ret;
+ if (gimple_ssa_operands (cfun)->free_defs)
+ {
+ ret = gimple_ssa_operands (cfun)->free_defs;
+ gimple_ssa_operands (cfun)->free_defs
+ = gimple_ssa_operands (cfun)->free_defs->next;
+ }
+ else
+ ret = (struct def_optype_d *)
+ ssa_operand_alloc (sizeof (struct def_optype_d));
+ return ret;
+}
+
- /* Fold_stmt () may have changed the stmt pointers. */
- if (ptr->stmt != stmt)
- ptr->stmt = stmt;
+/* Allocate a USE operand. */
- prev = ptr->prev;
- if (prev)
+static inline struct use_optype_d *
+alloc_use (void)
+{
+ struct use_optype_d *ret;
+ if (gimple_ssa_operands (cfun)->free_uses)
{
- /* Find the root element, making sure we skip any safe iterators. */
- while (prev->use != NULL || prev->stmt == NULL)
- prev = prev->prev;
-
- /* Get the ssa_name of the list the node is in. */
- root = prev->stmt;
- /* If it's the right list, simply return. */
- if (root == *(ptr->use))
- return;
+ ret = gimple_ssa_operands (cfun)->free_uses;
+ gimple_ssa_operands (cfun)->free_uses
+ = gimple_ssa_operands (cfun)->free_uses->next;
}
- /* Its in the wrong list if we reach here. */
- delink_imm_use (ptr);
- link_imm_use (ptr, *(ptr->use));
+ else
+ ret = (struct use_optype_d *)
+ ssa_operand_alloc (sizeof (struct use_optype_d));
+ return ret;
}
-/* This routine makes sure that PTR is in an immediate use list, and makes
- sure the stmt pointer is set to the current stmt. Virtual uses do not need
- the overhead of correct_use_link since they cannot be directly manipulated
- like a real use can be. (They don't exist in the TREE_OPERAND nodes.) */
-static inline void
-set_virtual_use_link (use_operand_p ptr, tree stmt)
+/* Adds OP to the list of defs after LAST. */
+
+static inline def_optype_p
+add_def_op (tree *op, def_optype_p last)
{
- /* Fold_stmt () may have changed the stmt pointers. */
- if (ptr->stmt != stmt)
- ptr->stmt = stmt;
+ def_optype_p new_def;
- /* If this use isn't in a list, add it to the correct list. */
- if (!ptr->prev)
- link_imm_use (ptr, *(ptr->use));
+ new_def = alloc_def ();
+ DEF_OP_PTR (new_def) = op;
+ last->next = new_def;
+ new_def->next = NULL;
+ return new_def;
}
+/* Adds OP to the list of uses of statement STMT after LAST. */
-#define FINALIZE_OPBUILD build_defs
-#define FINALIZE_OPBUILD_BASE(I) (tree *)VEC_index (tree, \
- build_defs, (I))
-#define FINALIZE_OPBUILD_ELEM(I) (tree *)VEC_index (tree, \
- build_defs, (I))
-#define FINALIZE_FUNC finalize_ssa_def_ops
-#define FINALIZE_ALLOC alloc_def
-#define FINALIZE_FREE free_defs
-#define FINALIZE_TYPE struct def_optype_d
-#define FINALIZE_ELEM(PTR) ((PTR)->def_ptr)
-#define FINALIZE_OPS DEF_OPS
-#define FINALIZE_BASE(VAR) VAR
-#define FINALIZE_BASE_TYPE tree *
-#define FINALIZE_BASE_ZERO NULL
-#define FINALIZE_INITIALIZE(PTR, VAL, STMT) FINALIZE_ELEM (PTR) = (VAL)
-#include "tree-ssa-opfinalize.h"
+static inline use_optype_p
+add_use_op (gimple stmt, tree *op, use_optype_p last)
+{
+ use_optype_p new_use;
+
+ new_use = alloc_use ();
+ USE_OP_PTR (new_use)->use = op;
+ link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt);
+ last->next = new_use;
+ new_use->next = NULL;
+ return new_use;
+}
-/* This routine will create stmt operands for STMT from the def build list. */
-static void
-finalize_ssa_defs (tree stmt)
+/* Takes elements from build_defs and turns them into def operands of STMT.
+ TODO -- Make build_defs VEC of tree *. */
+
+static inline void
+finalize_ssa_defs (gimple stmt)
{
+ unsigned new_i;
+ struct def_optype_d new_list;
+ def_optype_p old_ops, last;
unsigned int num = VEC_length (tree, build_defs);
- /* There should only be a single real definition per assignment. */
- gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);
- /* If there is an old list, often the new list is identical, or close, so
- find the elements at the beginning that are the same as the vector. */
+ /* There should only be a single real definition per assignment. */
+ gcc_assert ((stmt && gimple_code (stmt) != GIMPLE_ASSIGN) || num <= 1);
- finalize_ssa_def_ops (stmt);
- VEC_truncate (tree, build_defs, 0);
-}
+ /* Pre-pend the VDEF we may have built. */
+ if (build_vdef != NULL_TREE)
+ {
+ tree oldvdef = gimple_vdef (stmt);
+ if (oldvdef
+ && TREE_CODE (oldvdef) == SSA_NAME)
+ oldvdef = SSA_NAME_VAR (oldvdef);
+ if (oldvdef != build_vdef)
+ gimple_set_vdef (stmt, build_vdef);
+ VEC_safe_insert (tree, heap, build_defs, 0, (tree)gimple_vdef_ptr (stmt));
+ ++num;
+ }
-#define FINALIZE_OPBUILD build_uses
-#define FINALIZE_OPBUILD_BASE(I) (tree *)VEC_index (tree, \
- build_uses, (I))
-#define FINALIZE_OPBUILD_ELEM(I) (tree *)VEC_index (tree, \
- build_uses, (I))
-#define FINALIZE_FUNC finalize_ssa_use_ops
-#define FINALIZE_ALLOC alloc_use
-#define FINALIZE_FREE free_uses
-#define FINALIZE_TYPE struct use_optype_d
-#define FINALIZE_ELEM(PTR) ((PTR)->use_ptr.use)
-#define FINALIZE_OPS USE_OPS
-#define FINALIZE_USE_PTR(PTR) USE_OP_PTR (PTR)
-#define FINALIZE_CORRECT_USE correct_use_link
-#define FINALIZE_BASE(VAR) VAR
-#define FINALIZE_BASE_TYPE tree *
-#define FINALIZE_BASE_ZERO NULL
-#define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
- (PTR)->use_ptr.use = (VAL); \
- link_imm_use_stmt (&((PTR)->use_ptr), \
- *(VAL), (STMT))
-#include "tree-ssa-opfinalize.h"
-
-/* Return a new use operand vector for STMT, comparing to OLD_OPS_P. */
-
-static void
-finalize_ssa_uses (tree stmt)
-{
-#ifdef ENABLE_CHECKING
- {
- unsigned x;
- unsigned num = VEC_length (tree, build_uses);
-
- /* If the pointer to the operand is the statement itself, something is
- wrong. It means that we are pointing to a local variable (the
- initial call to get_stmt_operands does not pass a pointer to a
- statement). */
- for (x = 0; x < num; x++)
- gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
- }
-#endif
- finalize_ssa_use_ops (stmt);
- VEC_truncate (tree, build_uses, 0);
-}
-
-
-/* Return a new v_may_def operand vector for STMT, comparing to OLD_OPS_P. */
-#define FINALIZE_OPBUILD build_v_may_defs
-#define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_v_may_defs, (I))
-#define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
- build_v_may_defs, (I)))
-#define FINALIZE_FUNC finalize_ssa_v_may_def_ops
-#define FINALIZE_ALLOC alloc_maydef
-#define FINALIZE_FREE free_maydefs
-#define FINALIZE_TYPE struct maydef_optype_d
-#define FINALIZE_ELEM(PTR) MAYDEF_RESULT (PTR)
-#define FINALIZE_OPS MAYDEF_OPS
-#define FINALIZE_USE_PTR(PTR) MAYDEF_OP_PTR (PTR)
-#define FINALIZE_CORRECT_USE set_virtual_use_link
-#define FINALIZE_BASE_ZERO 0
-#define FINALIZE_BASE(VAR) get_name_decl (VAR)
-#define FINALIZE_BASE_TYPE unsigned
-#define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
- (PTR)->def_var = (VAL); \
- (PTR)->use_var = (VAL); \
- (PTR)->use_ptr.use = &((PTR)->use_var); \
- link_imm_use_stmt (&((PTR)->use_ptr), \
- (VAL), (STMT))
-#include "tree-ssa-opfinalize.h"
-
-
-static void
-finalize_ssa_v_may_defs (tree stmt)
-{
- finalize_ssa_v_may_def_ops (stmt);
-}
-
+ new_list.next = NULL;
+ last = &new_list;
-/* Clear the in_list bits and empty the build array for v_may_defs. */
+ old_ops = gimple_def_ops (stmt);
-static inline void
-cleanup_v_may_defs (void)
-{
- unsigned x, num;
- num = VEC_length (tree, build_v_may_defs);
+ new_i = 0;
- for (x = 0; x < num; x++)
+ /* Clear and unlink a no longer necessary VDEF. */
+ if (build_vdef == NULL_TREE
+ && gimple_vdef (stmt) != NULL_TREE)
{
- tree t = VEC_index (tree, build_v_may_defs, x);
- if (TREE_CODE (t) != SSA_NAME)
+ if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
{
- var_ann_t ann = var_ann (t);
- ann->in_v_may_def_list = 0;
+ unlink_stmt_vdef (stmt);
+ release_ssa_name (gimple_vdef (stmt));
}
+ gimple_set_vdef (stmt, NULL_TREE);
}
- VEC_truncate (tree, build_v_may_defs, 0);
-}
-
-#define FINALIZE_OPBUILD build_vuses
-#define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_vuses, (I))
-#define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
- build_vuses, (I)))
-#define FINALIZE_FUNC finalize_ssa_vuse_ops
-#define FINALIZE_ALLOC alloc_vuse
-#define FINALIZE_FREE free_vuses
-#define FINALIZE_TYPE struct vuse_optype_d
-#define FINALIZE_ELEM(PTR) VUSE_OP (PTR)
-#define FINALIZE_OPS VUSE_OPS
-#define FINALIZE_USE_PTR(PTR) VUSE_OP_PTR (PTR)
-#define FINALIZE_CORRECT_USE set_virtual_use_link
-#define FINALIZE_BASE_ZERO 0
-#define FINALIZE_BASE(VAR) get_name_decl (VAR)
-#define FINALIZE_BASE_TYPE unsigned
-#define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
- (PTR)->use_var = (VAL); \
- (PTR)->use_ptr.use = &((PTR)->use_var); \
- link_imm_use_stmt (&((PTR)->use_ptr), \
- (VAL), (STMT))
-#include "tree-ssa-opfinalize.h"
-
-
-/* Return a new vuse operand vector, comparing to OLD_OPS_P. */
-
-static void
-finalize_ssa_vuses (tree stmt)
+ /* If we have a non-SSA_NAME VDEF, mark it for renaming. */
+ if (gimple_vdef (stmt)
+ && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
+ mark_sym_for_renaming (gimple_vdef (stmt));
+
+ /* Check for the common case of 1 def that hasn't changed. */
+ if (old_ops && old_ops->next == NULL && num == 1
+ && (tree *) VEC_index (tree, build_defs, 0) == DEF_OP_PTR (old_ops))
+ return;
+
+ /* If there is anything in the old list, free it. */
+ if (old_ops)
+ {
+ old_ops->next = gimple_ssa_operands (cfun)->free_defs;
+ gimple_ssa_operands (cfun)->free_defs = old_ops;
+ }
+
+ /* If there is anything remaining in the build_defs list, simply emit it. */
+ for ( ; new_i < num; new_i++)
+ last = add_def_op ((tree *) VEC_index (tree, build_defs, new_i), last);
+
+ /* Now set the stmt's operands. */
+ gimple_set_def_ops (stmt, new_list.next);
+}
+
+
+/* Takes elements from build_uses and turns them into use operands of STMT.
+ TODO -- Make build_uses VEC of tree *. */
+
+static inline void
+finalize_ssa_uses (gimple stmt)
{
- unsigned num, num_v_may_defs;
- unsigned vuse_index;
+ unsigned new_i;
+ struct use_optype_d new_list;
+ use_optype_p old_ops, ptr, last;
- /* Remove superfluous VUSE operands. If the statement already has a
- V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
- needed because V_MAY_DEFs imply a VUSE of the variable. For instance,
- suppose that variable 'a' is aliased:
+ /* Pre-pend the VUSE we may have built. */
+ if (build_vuse != NULL_TREE)
+ {
+ tree oldvuse = gimple_vuse (stmt);
+ if (oldvuse
+ && TREE_CODE (oldvuse) == SSA_NAME)
+ oldvuse = SSA_NAME_VAR (oldvuse);
+ if (oldvuse != (build_vuse != NULL_TREE
+ ? build_vuse : build_vdef))
+ gimple_set_vuse (stmt, NULL_TREE);
+ VEC_safe_insert (tree, heap, build_uses, 0, (tree)gimple_vuse_ptr (stmt));
+ }
- # VUSE
- # a_3 = V_MAY_DEF
- a = a + 1;
+ new_list.next = NULL;
+ last = &new_list;
- The VUSE is superfluous because it is implied by the V_MAY_DEF
- operation. */
+ old_ops = gimple_use_ops (stmt);
- num = VEC_length (tree, build_vuses);
- num_v_may_defs = VEC_length (tree, build_v_may_defs);
+ /* Clear a no longer necessary VUSE. */
+ if (build_vuse == NULL_TREE
+ && gimple_vuse (stmt) != NULL_TREE)
+ gimple_set_vuse (stmt, NULL_TREE);
- if (num > 0 && num_v_may_defs > 0)
+ /* If there is anything in the old list, free it. */
+ if (old_ops)
{
- for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
- {
- tree vuse;
- vuse = VEC_index (tree, build_vuses, vuse_index);
- if (TREE_CODE (vuse) != SSA_NAME)
- {
- var_ann_t ann = var_ann (vuse);
- ann->in_vuse_list = 0;
- if (ann->in_v_may_def_list)
- {
- VEC_ordered_remove (tree, build_vuses, vuse_index);
- continue;
- }
- }
- vuse_index++;
- }
+ for (ptr = old_ops; ptr; ptr = ptr->next)
+ delink_imm_use (USE_OP_PTR (ptr));
+ old_ops->next = gimple_ssa_operands (cfun)->free_uses;
+ gimple_ssa_operands (cfun)->free_uses = old_ops;
}
- else
- /* Clear out the in_list bits. */
- for (vuse_index = 0;
- vuse_index < VEC_length (tree, build_vuses);
- vuse_index++)
- {
- tree t = VEC_index (tree, build_vuses, vuse_index);
- if (TREE_CODE (t) != SSA_NAME)
- {
- var_ann_t ann = var_ann (t);
- ann->in_vuse_list = 0;
- }
- }
- finalize_ssa_vuse_ops (stmt);
- /* The v_may_def build vector wasn't cleaned up because we needed it. */
- cleanup_v_may_defs ();
-
- /* Free the vuses build vector. */
- VEC_truncate (tree, build_vuses, 0);
+ /* If we added a VUSE, make sure to set the operand if it is not already
+ present and mark it for renaming. */
+ if (build_vuse != NULL_TREE
+ && gimple_vuse (stmt) == NULL_TREE)
+ {
+ gimple_set_vuse (stmt, gimple_vop (cfun));
+ mark_sym_for_renaming (gimple_vop (cfun));
+ }
+
+ /* Now create operand nodes for all the new uses. */
+ for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
+ last = add_use_op (stmt,
+ (tree *) VEC_index (tree, build_uses, new_i),
+ last);
+ /* Now set the stmt's operands. */
+ gimple_set_use_ops (stmt, new_list.next);
}
-
-/* Return a new v_must_def operand vector for STMT, comparing to OLD_OPS_P. */
-
-#define FINALIZE_OPBUILD build_v_must_defs
-#define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_v_must_defs, (I))
-#define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
- build_v_must_defs, (I)))
-#define FINALIZE_FUNC finalize_ssa_v_must_def_ops
-#define FINALIZE_ALLOC alloc_mustdef
-#define FINALIZE_FREE free_mustdefs
-#define FINALIZE_TYPE struct mustdef_optype_d
-#define FINALIZE_ELEM(PTR) MUSTDEF_RESULT (PTR)
-#define FINALIZE_OPS MUSTDEF_OPS
-#define FINALIZE_USE_PTR(PTR) MUSTDEF_KILL_PTR (PTR)
-#define FINALIZE_CORRECT_USE set_virtual_use_link
-#define FINALIZE_BASE_ZERO 0
-#define FINALIZE_BASE(VAR) get_name_decl (VAR)
-#define FINALIZE_BASE_TYPE unsigned
-#define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
- (PTR)->def_var = (VAL); \
- (PTR)->kill_var = (VAL); \
- (PTR)->use_ptr.use = &((PTR)->kill_var);\
- link_imm_use_stmt (&((PTR)->use_ptr), \
- (VAL), (STMT))
-#include "tree-ssa-opfinalize.h"
-static void
-finalize_ssa_v_must_defs (tree stmt)
+/* Clear the built VDEF/VUSE operands and empty the build arrays. */
+
+static inline void
+cleanup_build_arrays (void)
{
- /* In the presence of subvars, there may be more than one V_MUST_DEF per
- statement (one for each subvar). It is a bit expensive to verify that
- all must-defs in a statement belong to subvars if there is more than one
- MUST-def, so we don't do it. Suffice to say, if you reach here without
- having subvars, and have num >1, you have hit a bug. */
-
- finalize_ssa_v_must_def_ops (stmt);
- VEC_truncate (tree, build_v_must_defs, 0);
+ build_vdef = NULL_TREE;
+ build_vuse = NULL_TREE;
+ VEC_truncate (tree, build_defs, 0);
+ VEC_truncate (tree, build_uses, 0);
}
/* Finalize all the build vectors, fill the new ones into STMT. */
static inline void
-finalize_ssa_stmt_operands (tree stmt)
+finalize_ssa_stmt_operands (gimple stmt)
{
finalize_ssa_defs (stmt);
finalize_ssa_uses (stmt);
- finalize_ssa_v_must_defs (stmt);
- finalize_ssa_v_may_defs (stmt);
- finalize_ssa_vuses (stmt);
+ cleanup_build_arrays ();
}
@@ -656,9 +580,8 @@ start_ssa_stmt_operands (void)
{
gcc_assert (VEC_length (tree, build_defs) == 0);
gcc_assert (VEC_length (tree, build_uses) == 0);
- gcc_assert (VEC_length (tree, build_vuses) == 0);
- gcc_assert (VEC_length (tree, build_v_may_defs) == 0);
- gcc_assert (VEC_length (tree, build_v_must_defs) == 0);
+ gcc_assert (build_vuse == NULL_TREE);
+ gcc_assert (build_vdef == NULL_TREE);
}
@@ -667,7 +590,7 @@ start_ssa_stmt_operands (void)
static inline void
append_def (tree *def_p)
{
- VEC_safe_push (tree, heap, build_defs, (tree)def_p);
+ VEC_safe_push (tree, heap, build_defs, (tree) def_p);
}
@@ -676,579 +599,368 @@ append_def (tree *def_p)
static inline void
append_use (tree *use_p)
{
- VEC_safe_push (tree, heap, build_uses, (tree)use_p);
+ VEC_safe_push (tree, heap, build_uses, (tree) use_p);
}
-/* Add a new virtual may def for variable VAR to the build array. */
+/* Add VAR to the set of variables that require a VDEF operator. */
static inline void
-append_v_may_def (tree var)
+append_vdef (tree var)
{
- if (TREE_CODE (var) != SSA_NAME)
- {
- var_ann_t ann = get_var_ann (var);
+ if (!optimize)
+ return;
- /* Don't allow duplicate entries. */
- if (ann->in_v_may_def_list)
- return;
- ann->in_v_may_def_list = 1;
- }
+ gcc_assert ((build_vdef == NULL_TREE
+ || build_vdef == var)
+ && (build_vuse == NULL_TREE
+ || build_vuse == var));
- VEC_safe_push (tree, heap, build_v_may_defs, (tree)var);
+ build_vdef = var;
+ build_vuse = var;
}
-/* Add VAR to the list of virtual uses. */
+/* Add VAR to the set of variables that require a VUSE operator. */
static inline void
append_vuse (tree var)
{
+ if (!optimize)
+ return;
- /* Don't allow duplicate entries. */
- if (TREE_CODE (var) != SSA_NAME)
- {
- var_ann_t ann = get_var_ann (var);
-
- if (ann->in_vuse_list || ann->in_v_may_def_list)
- return;
- ann->in_vuse_list = 1;
- }
+ gcc_assert (build_vuse == NULL_TREE
+ || build_vuse == var);
- VEC_safe_push (tree, heap, build_vuses, (tree)var);
+ build_vuse = var;
}
+/* Add virtual operands for STMT. FLAGS is as in get_expr_operands. */
-/* Add VAR to the list of virtual must definitions for INFO. */
-
-static inline void
-append_v_must_def (tree var)
+static void
+add_virtual_operand (gimple stmt ATTRIBUTE_UNUSED, int flags)
{
- unsigned i;
-
- /* Don't allow duplicate entries. */
- for (i = 0; i < VEC_length (tree, build_v_must_defs); i++)
- if (var == VEC_index (tree, build_v_must_defs, i))
- return;
+ /* Add virtual operands to the stmt, unless the caller has specifically
+ requested not to do that (used when adding operands inside an
+ ADDR_EXPR expression). */
+ if (flags & opf_no_vops)
+ return;
- VEC_safe_push (tree, heap, build_v_must_defs, (tree)var);
+ if (flags & opf_def)
+ append_vdef (gimple_vop (cfun));
+ else
+ append_vuse (gimple_vop (cfun));
}
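/* E.g. a load 'x_1 = *p_2' reaches this point with opf_use and gets a
   VUSE of .MEM, while a store '*p_2 = x_1' arrives with opf_def and
   gets a VDEF (append_vdef records the implied VUSE as well).  */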
-/* Parse STMT looking for operands. OLD_OPS is the original stmt operand
- cache for STMT, if it existed before. When finished, the various build_*
- operand vectors will have potential operands. in them. */
-
+/* Add *VAR_P to the appropriate operand array for statement STMT.
+ FLAGS is as in get_expr_operands. If *VAR_P is a GIMPLE register,
+ it will be added to the statement's real operands, otherwise it is
+ added to virtual operands. */
+
static void
-parse_ssa_operands (tree stmt)
+add_stmt_operand (tree *var_p, gimple stmt, int flags)
{
- enum tree_code code;
+ tree var, sym;
+ var_ann_t v_ann;
- code = TREE_CODE (stmt);
- switch (code)
- {
- case MODIFY_EXPR:
- /* First get operands from the RHS. For the LHS, we use a V_MAY_DEF if
- either only part of LHS is modified or if the RHS might throw,
- otherwise, use V_MUST_DEF.
+ gcc_assert (SSA_VAR_P (*var_p));
- ??? If it might throw, we should represent somehow that it is killed
- on the fallthrough path. */
- {
- tree lhs = TREE_OPERAND (stmt, 0);
- int lhs_flags = opf_is_def;
+ var = *var_p;
+ sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
+ v_ann = var_ann (sym);
+
+ /* Mark statements with volatile operands. */
+ if (TREE_THIS_VOLATILE (sym))
+ gimple_set_has_volatile_ops (stmt, true);
- get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);
+ if (is_gimple_reg (sym))
+ {
+ /* The variable is a GIMPLE register. Add it to real operands. */
+ if (flags & opf_def)
+ append_def (var_p);
+ else
+ append_use (var_p);
+ }
+ else
+ add_virtual_operand (stmt, flags);
+}
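/* E.g. a reference to a local scalar 'i' becomes a real DEF or USE
   operand here, while a reference to a global or otherwise aliased
   variable is routed into the single virtual operand instead.  */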
- /* If the LHS is a VIEW_CONVERT_EXPR, it isn't changing whether
- or not the entire LHS is modified; that depends on what's
- inside the VIEW_CONVERT_EXPR. */
- if (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
- lhs = TREE_OPERAND (lhs, 0);
+/* Mark the base address of REF as having its address taken.
+ REF may be a single variable whose address has been taken or any
+ other valid GIMPLE memory reference (structure reference, array,
+ etc). */
- if (TREE_CODE (lhs) != ARRAY_RANGE_REF
- && TREE_CODE (lhs) != BIT_FIELD_REF)
- lhs_flags |= opf_kill_def;
+static void
+mark_address_taken (tree ref)
+{
+ tree var;
- get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), lhs_flags);
- }
- break;
+ /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
+ as the only thing we take the address of. If VAR is a structure,
+ taking the address of a field means that the whole structure may
+ be referenced using pointer arithmetic. See PR 21407 and the
+ ensuing mailing list discussion. */
+ var = get_base_address (ref);
+ if (var && DECL_P (var))
+ TREE_ADDRESSABLE (var) = 1;
+}
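/* For instance, given 'p = &s.f', the base 's' as a whole becomes
   TREE_ADDRESSABLE, not just the field 'f', since pointer arithmetic
   on 'p' may reach any part of 's'.  */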
- case COND_EXPR:
- get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
- break;
- case SWITCH_EXPR:
- get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
- break;
+/* A subroutine of get_expr_operands to handle INDIRECT_REF,
+ ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.
- case ASM_EXPR:
- get_asm_expr_operands (stmt);
- break;
+ STMT is the statement being processed, EXPR is the INDIRECT_REF
+ that got us here.
+
+ FLAGS is as in get_expr_operands.
- case RETURN_EXPR:
- get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
- break;
+ RECURSE_ON_BASE should be set to true if we want to continue
+ calling get_expr_operands on the base pointer, and false if
+ something else will do it for us. */
- case GOTO_EXPR:
- get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
- break;
+static void
+get_indirect_ref_operands (gimple stmt, tree expr, int flags,
+ bool recurse_on_base)
+{
+ tree *pptr = &TREE_OPERAND (expr, 0);
- case LABEL_EXPR:
- get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
- break;
+ if (TREE_THIS_VOLATILE (expr))
+ gimple_set_has_volatile_ops (stmt, true);
- /* These nodes contain no variable references. */
- case BIND_EXPR:
- case CASE_LABEL_EXPR:
- case TRY_CATCH_EXPR:
- case TRY_FINALLY_EXPR:
- case EH_FILTER_EXPR:
- case CATCH_EXPR:
- case RESX_EXPR:
- break;
+ /* Add the VOP. */
+ add_virtual_operand (stmt, flags);
- default:
- /* Notice that if get_expr_operands tries to use &STMT as the operand
- pointer (which may only happen for USE operands), we will fail in
- append_use. This default will handle statements like empty
- statements, or CALL_EXPRs that may appear on the RHS of a statement
- or as statements themselves. */
- get_expr_operands (stmt, &stmt, opf_none);
- break;
- }
+ /* If requested, add a USE operand for the base pointer. */
+ if (recurse_on_base)
+ get_expr_operands (stmt, pptr, opf_use);
}
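/* So '*p = x' records a VDEF of .MEM plus a real USE of the pointer
   'p' itself, while 'x = *p' records a VUSE together with the same
   USE of 'p'.  */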
-/* Create an operands cache for STMT. */
+
+/* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
static void
-build_ssa_operands (tree stmt)
+get_tmr_operands (gimple stmt, tree expr, int flags)
{
- stmt_ann_t ann = get_stmt_ann (stmt);
-
- /* Initially assume that the statement has no volatile operands. */
- if (ann)
- ann->has_volatile_ops = false;
-
- start_ssa_stmt_operands ();
+ /* First record the real operands. */
+ get_expr_operands (stmt, &TMR_BASE (expr), opf_use);
+ get_expr_operands (stmt, &TMR_INDEX (expr), opf_use);
- parse_ssa_operands (stmt);
- operand_build_sort_virtual (build_vuses);
- operand_build_sort_virtual (build_v_may_defs);
- operand_build_sort_virtual (build_v_must_defs);
+ if (TMR_SYMBOL (expr))
+ mark_address_taken (TMR_SYMBOL (expr));
- finalize_ssa_stmt_operands (stmt);
+ add_virtual_operand (stmt, flags);
}
-/* Free any operands vectors in OPS. */
-void
-free_ssa_operands (stmt_operands_p ops)
+/* If STMT is a call that may clobber globals and other symbols that
+ escape, add them to the VDEF/VUSE lists for it. */
+
+static void
+maybe_add_call_vops (gimple stmt)
{
- ops->def_ops = NULL;
- ops->use_ops = NULL;
- ops->maydef_ops = NULL;
- ops->mustdef_ops = NULL;
- ops->vuse_ops = NULL;
+ int call_flags = gimple_call_flags (stmt);
+
+ /* If aliases have been computed already, add VDEF or VUSE
+ operands for all the symbols that have been found to be
+ call-clobbered. */
+ if (!(call_flags & ECF_NOVOPS))
+ {
+ /* A 'pure' or a 'const' function never call-clobbers anything.
+ A 'noreturn' function might, but since we don't return anyway
+ there is no point in recording that. */
+ if (!(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
+ add_virtual_operand (stmt, opf_def);
+ else if (!(call_flags & ECF_CONST))
+ add_virtual_operand (stmt, opf_use);
+ }
}
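/* In other words: a call to a 'const' function gets no virtual
   operands at all, a 'pure' (or non-const 'noreturn') call gets only
   a VUSE of .MEM, and every other call gets a VDEF clobbering .MEM.  */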
-/* Get the operands of statement STMT. Note that repeated calls to
- get_stmt_operands for the same statement will do nothing until the
- statement is marked modified by a call to mark_stmt_modified(). */
+/* Scan operands in the GIMPLE_ASM statement STMT. */
-void
-update_stmt_operands (tree stmt)
+static void
+get_asm_expr_operands (gimple stmt)
{
- stmt_ann_t ann = get_stmt_ann (stmt);
- /* If get_stmt_operands is called before SSA is initialized, dont
- do anything. */
- if (!ssa_operands_active ())
- return;
- /* The optimizers cannot handle statements that are nothing but a
- _DECL. This indicates a bug in the gimplifier. */
- gcc_assert (!SSA_VAR_P (stmt));
-
- gcc_assert (ann->modified);
+ size_t i, noutputs;
+ const char **oconstraints;
+ const char *constraint;
+ bool allows_mem, allows_reg, is_inout;
- timevar_push (TV_TREE_OPS);
+ noutputs = gimple_asm_noutputs (stmt);
+ oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
- build_ssa_operands (stmt);
+ /* Gather all output operands. */
+ for (i = 0; i < gimple_asm_noutputs (stmt); i++)
+ {
+ tree link = gimple_asm_output_op (stmt, i);
+ constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
+ oconstraints[i] = constraint;
+ parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
+ &allows_reg, &is_inout);
- /* Clear the modified bit for STMT. Subsequent calls to
- get_stmt_operands for this statement will do nothing until the
- statement is marked modified by a call to mark_stmt_modified(). */
- ann->modified = 0;
+ /* This should have been split in gimplify_asm_expr. */
+ gcc_assert (!allows_reg || !is_inout);
- timevar_pop (TV_TREE_OPS);
-}
-
-
-/* Copies virtual operands from SRC to DST. */
-
-void
-copy_virtual_operands (tree dest, tree src)
-{
- tree t;
- ssa_op_iter iter, old_iter;
- use_operand_p use_p, u2;
- def_operand_p def_p, d2;
-
- build_ssa_operands (dest);
-
- /* Copy all the virtual fields. */
- FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
- append_vuse (t);
- FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
- append_v_may_def (t);
- FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
- append_v_must_def (t);
-
- if (VEC_length (tree, build_vuses) == 0
- && VEC_length (tree, build_v_may_defs) == 0
- && VEC_length (tree, build_v_must_defs) == 0)
- return;
-
- /* Now commit the virtual operands to this stmt. */
- finalize_ssa_v_must_defs (dest);
- finalize_ssa_v_may_defs (dest);
- finalize_ssa_vuses (dest);
-
- /* Finally, set the field to the same values as then originals. */
-
-
- t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
- FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
- {
- gcc_assert (!op_iter_done (&old_iter));
- SET_USE (use_p, t);
- t = op_iter_next_tree (&old_iter);
- }
- gcc_assert (op_iter_done (&old_iter));
+ /* Memory operands are addressable. Note that STMT needs the
+ address of this operand. */
+ if (!allows_reg && allows_mem)
+ {
+ tree t = get_base_address (TREE_VALUE (link));
+ if (t && DECL_P (t))
+ mark_address_taken (t);
+ }
- op_iter_init_maydef (&old_iter, src, &u2, &d2);
- FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
- {
- gcc_assert (!op_iter_done (&old_iter));
- SET_USE (use_p, USE_FROM_PTR (u2));
- SET_DEF (def_p, DEF_FROM_PTR (d2));
- op_iter_next_maymustdef (&u2, &d2, &old_iter);
+ get_expr_operands (stmt, &TREE_VALUE (link), opf_def);
}
- gcc_assert (op_iter_done (&old_iter));
- op_iter_init_mustdef (&old_iter, src, &u2, &d2);
- FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
+ /* Gather all input operands. */
+ for (i = 0; i < gimple_asm_ninputs (stmt); i++)
{
- gcc_assert (!op_iter_done (&old_iter));
- SET_USE (use_p, USE_FROM_PTR (u2));
- SET_DEF (def_p, DEF_FROM_PTR (d2));
- op_iter_next_maymustdef (&u2, &d2, &old_iter);
- }
- gcc_assert (op_iter_done (&old_iter));
-
-}
-
-
-/* Specifically for use in DOM's expression analysis. Given a store, we
- create an artificial stmt which looks like a load from the store, this can
- be used to eliminate redundant loads. OLD_OPS are the operands from the
- store stmt, and NEW_STMT is the new load which represents a load of the
- values stored. */
+ tree link = gimple_asm_input_op (stmt, i);
+ constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
+ parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
+ &allows_mem, &allows_reg);
-void
-create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
-{
- stmt_ann_t ann;
- tree op;
- ssa_op_iter iter;
- use_operand_p use_p;
- unsigned x;
-
- ann = get_stmt_ann (new_stmt);
-
- /* process the stmt looking for operands. */
- start_ssa_stmt_operands ();
- parse_ssa_operands (new_stmt);
-
- for (x = 0; x < VEC_length (tree, build_vuses); x++)
- {
- tree t = VEC_index (tree, build_vuses, x);
- if (TREE_CODE (t) != SSA_NAME)
+ /* Memory operands are addressable. Note that STMT needs the
+ address of this operand. */
+ if (!allows_reg && allows_mem)
{
- var_ann_t ann = var_ann (t);
- ann->in_vuse_list = 0;
+ tree t = get_base_address (TREE_VALUE (link));
+ if (t && DECL_P (t))
+ mark_address_taken (t);
}
+
+ get_expr_operands (stmt, &TREE_VALUE (link), 0);
}
-
- for (x = 0; x < VEC_length (tree, build_v_may_defs); x++)
+
+ /* Clobber all memory and addressable symbols for asm ("" : : : "memory"); */
+ for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
{
- tree t = VEC_index (tree, build_v_may_defs, x);
- if (TREE_CODE (t) != SSA_NAME)
+ tree link = gimple_asm_clobber_op (stmt, i);
+ if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
{
- var_ann_t ann = var_ann (t);
- ann->in_v_may_def_list = 0;
+ add_virtual_operand (stmt, opf_def);
+ break;
}
}
- /* Remove any virtual operands that were found. */
- VEC_truncate (tree, build_v_may_defs, 0);
- VEC_truncate (tree, build_v_must_defs, 0);
- VEC_truncate (tree, build_vuses, 0);
-
- /* For each VDEF on the original statement, we want to create a
- VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
- statement. */
- FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
- (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
- append_vuse (op);
-
- /* Now build the operands for this new stmt. */
- finalize_ssa_stmt_operands (new_stmt);
-
- /* All uses in this fake stmt must not be in the immediate use lists. */
- FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
- delink_imm_use (use_p);
}
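/* Hence 'asm volatile ("" : : : "memory")' receives a VDEF of .MEM and
   so acts as a full memory barrier to the SSA optimizers.  */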
-void
-swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
-{
- tree op0, op1;
- op0 = *exp0;
- op1 = *exp1;
-
- /* If the operand cache is active, attempt to preserve the relative positions
- of these two operands in their respective immediate use lists. */
- if (ssa_operands_active () && op0 != op1)
- {
- use_optype_p use0, use1, ptr;
- use0 = use1 = NULL;
- /* Find the 2 operands in the cache, if they are there. */
- for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
- if (USE_OP_PTR (ptr)->use == exp0)
- {
- use0 = ptr;
- break;
- }
- for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
- if (USE_OP_PTR (ptr)->use == exp1)
- {
- use1 = ptr;
- break;
- }
- /* If both uses don't have operand entries, there isn't much we can do
- at this point. Presumably we dont need to worry about it. */
- if (use0 && use1)
- {
- tree *tmp = USE_OP_PTR (use1)->use;
- USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
- USE_OP_PTR (use0)->use = tmp;
- }
- }
-
- /* Now swap the data. */
- *exp0 = op1;
- *exp1 = op0;
-}
-/* Recursively scan the expression pointed to by EXPR_P in statement referred
- to by INFO. FLAGS is one of the OPF_* constants modifying how to interpret
- the operands found. */
+/* Recursively scan the expression pointed to by EXPR_P in statement
+ STMT. FLAGS is one of the OPF_* constants modifying how to
+ interpret the operands found. */
static void
-get_expr_operands (tree stmt, tree *expr_p, int flags)
+get_expr_operands (gimple stmt, tree *expr_p, int flags)
{
enum tree_code code;
- enum tree_code_class class;
+ enum tree_code_class codeclass;
tree expr = *expr_p;
- stmt_ann_t s_ann = stmt_ann (stmt);
if (expr == NULL)
return;
code = TREE_CODE (expr);
- class = TREE_CODE_CLASS (code);
+ codeclass = TREE_CODE_CLASS (code);
switch (code)
{
case ADDR_EXPR:
/* Taking the address of a variable does not represent a
- reference to it, but the fact that the stmt takes its address will be
- of interest to some passes (e.g. alias resolution). */
- add_to_addressable_set (TREE_OPERAND (expr, 0),
- &s_ann->addresses_taken);
+ reference to it, but the fact that the statement takes its
+ address will be of interest to some passes (e.g. alias
+ resolution). */
+ mark_address_taken (TREE_OPERAND (expr, 0));
- /* If the address is invariant, there may be no interesting variable
- references inside. */
+ /* If the address is invariant, there may be no interesting
+ variable references inside. */
if (is_gimple_min_invariant (expr))
return;
- /* There should be no VUSEs created, since the referenced objects are
- not really accessed. The only operands that we should find here
- are ARRAY_REF indices which will always be real operands (GIMPLE
- does not allow non-registers as array indices). */
+ /* Otherwise, there may be variables referenced inside but there
+ should be no VUSEs created, since the referenced objects are
+ not really accessed. The only operands that we should find
+ here are ARRAY_REF indices which will always be real operands
+ (GIMPLE does not allow non-registers as array indices). */
flags |= opf_no_vops;
-
get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
return;
case SSA_NAME:
- case STRUCT_FIELD_TAG:
- case TYPE_MEMORY_TAG:
- case NAME_MEMORY_TAG:
-
- add_stmt_operand (expr_p, s_ann, flags);
+ add_stmt_operand (expr_p, stmt, flags);
return;
case VAR_DECL:
case PARM_DECL:
case RESULT_DECL:
- {
- subvar_t svars;
-
- /* Add the subvars for a variable if it has subvars, to DEFS or USES.
- Otherwise, add the variable itself.
- Whether it goes to USES or DEFS depends on the operand flags. */
- if (var_can_have_subvars (expr)
- && (svars = get_subvars_for_var (expr)))
- {
- subvar_t sv;
- for (sv = svars; sv; sv = sv->next)
- add_stmt_operand (&sv->var, s_ann, flags);
- }
- else
- {
- add_stmt_operand (expr_p, s_ann, flags);
- }
- return;
- }
+ add_stmt_operand (expr_p, stmt, flags);
+ return;
+
case MISALIGNED_INDIRECT_REF:
get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
/* fall through */
case ALIGN_INDIRECT_REF:
case INDIRECT_REF:
- get_indirect_ref_operands (stmt, expr, flags, NULL_TREE,
- 0, -1, true);
+ get_indirect_ref_operands (stmt, expr, flags, true);
return;
case TARGET_MEM_REF:
get_tmr_operands (stmt, expr, flags);
return;
- case ARRAY_RANGE_REF:
- /* Treat array references as references to the virtual variable
- representing the array. The virtual variable for an ARRAY_REF
- is the VAR_DECL for the array. */
-
- /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
- according to the value of IS_DEF. */
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
- return;
-
case ARRAY_REF:
+ case ARRAY_RANGE_REF:
case COMPONENT_REF:
case REALPART_EXPR:
case IMAGPART_EXPR:
{
tree ref;
HOST_WIDE_INT offset, size, maxsize;
- bool none = true;
- /* This component ref becomes an access to all of the subvariables
- it can touch, if we can determine that, but *NOT* the real one.
- If we can't determine which fields we could touch, the recursion
- will eventually get to a variable and add *all* of its subvars, or
- whatever is the minimum correct subset. */
+
+ if (TREE_THIS_VOLATILE (expr))
+ gimple_set_has_volatile_ops (stmt, true);
ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
- if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
- {
- subvar_t svars = get_subvars_for_var (ref);
- subvar_t sv;
- for (sv = svars; sv; sv = sv->next)
- {
- bool exact;
- if (overlap_subvar (offset, maxsize, sv->var, &exact))
- {
- int subvar_flags = flags;
- none = false;
- if (!exact
- || size != maxsize)
- subvar_flags &= ~opf_kill_def;
- add_stmt_operand (&sv->var, s_ann, subvar_flags);
- }
- }
- if (!none)
- flags |= opf_no_vops;
- }
- else if (TREE_CODE (ref) == INDIRECT_REF)
+ if (TREE_CODE (ref) == INDIRECT_REF)
{
- get_indirect_ref_operands (stmt, ref, flags, expr,
- offset, maxsize, false);
+ get_indirect_ref_operands (stmt, ref, flags, false);
flags |= opf_no_vops;
}
- /* Even if we found subvars above we need to ensure to see
- immediate uses for d in s.a[d]. In case of s.a having
- a subvar we'd miss it otherwise. */
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
- flags & ~opf_kill_def);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
if (code == COMPONENT_REF)
{
- if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
- s_ann->has_volatile_ops = true;
- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
+ if (TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
+ gimple_set_has_volatile_ops (stmt, true);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
}
- else if (code == ARRAY_REF)
+ else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
{
- get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_use);
}
+
return;
}
+
case WITH_SIZE_EXPR:
/* WITH_SIZE_EXPR is a pass-through reference to its first argument,
and an rvalue reference to its second argument. */
- get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
return;
- case CALL_EXPR:
- get_call_expr_operands (stmt, expr);
- return;
-
case COND_EXPR:
case VEC_COND_EXPR:
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
return;
- case MODIFY_EXPR:
- {
- int subflags;
- tree op;
-
- get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
-
- op = TREE_OPERAND (expr, 0);
- if (TREE_CODE (op) == WITH_SIZE_EXPR)
- op = TREE_OPERAND (expr, 0);
- if (TREE_CODE (op) == ARRAY_RANGE_REF
- || TREE_CODE (op) == REALPART_EXPR
- || TREE_CODE (op) == IMAGPART_EXPR)
- subflags = opf_is_def;
- else
- subflags = opf_is_def | opf_kill_def;
-
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags);
- return;
- }
-
case CONSTRUCTOR:
{
/* General aggregate CONSTRUCTORs have been decomposed, but they
@@ -1259,13 +971,17 @@ get_expr_operands (tree stmt, tree *expr_p, int flags)
for (idx = 0;
VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
idx++)
- get_expr_operands (stmt, &ce->value, opf_none);
+ get_expr_operands (stmt, &ce->value, opf_use);
return;
}
- case TRUTH_NOT_EXPR:
case BIT_FIELD_REF:
+ if (TREE_THIS_VOLATILE (expr))
+ gimple_set_has_volatile_ops (stmt, true);
+ /* FALLTHRU */
+
+ case TRUTH_NOT_EXPR:
case VIEW_CONVERT_EXPR:
do_unary:
get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
@@ -1293,29 +1009,24 @@ get_expr_operands (tree stmt, tree *expr_p, int flags)
return;
}
- case BLOCK:
+ case CHANGE_DYNAMIC_TYPE_EXPR:
+ gcc_unreachable ();
+
case FUNCTION_DECL:
- case EXC_PTR_EXPR:
- case FILTER_EXPR:
case LABEL_DECL:
case CONST_DECL:
- case OMP_PARALLEL:
- case OMP_SECTIONS:
- case OMP_FOR:
- case OMP_RETURN_EXPR:
- case OMP_SINGLE:
- case OMP_MASTER:
- case OMP_ORDERED:
- case OMP_CRITICAL:
+ case CASE_LABEL_EXPR:
+ case FILTER_EXPR:
+ case EXC_PTR_EXPR:
/* Expressions that make no memory references. */
return;
default:
- if (class == tcc_unary)
+ if (codeclass == tcc_unary)
goto do_unary;
- if (class == tcc_binary || class == tcc_comparison)
+ if (codeclass == tcc_binary || codeclass == tcc_comparison)
goto do_binary;
- if (class == tcc_constant || class == tcc_type)
+ if (codeclass == tcc_constant || codeclass == tcc_type)
return;
}
@@ -1324,786 +1035,162 @@ get_expr_operands (tree stmt, tree *expr_p, int flags)
fprintf (stderr, "unhandled expression in get_expr_operands():\n");
debug_tree (expr);
fputs ("\n", stderr);
- internal_error ("internal error");
#endif
gcc_unreachable ();
}
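+
+/* A worked example of the default dispatch above, using codes from
+   tree.def: NEGATE_EXPR has class tcc_unary, so it jumps to do_unary
+   and parses operand 0 with the incoming FLAGS; PLUS_EXPR has class
+   tcc_binary, so do_binary handles its two operands; INTEGER_CST has
+   class tcc_constant and contributes no operands at all.  */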
-/* Scan operands in the ASM_EXPR stmt referred to in INFO. */
+/* Parse STMT looking for operands. When finished, the various
+ build_* operand vectors will have potential operands in them. */
static void
-get_asm_expr_operands (tree stmt)
+parse_ssa_operands (gimple stmt)
{
- stmt_ann_t s_ann = stmt_ann (stmt);
- int noutputs = list_length (ASM_OUTPUTS (stmt));
- const char **oconstraints
- = (const char **) alloca ((noutputs) * sizeof (const char *));
- int i;
- tree link;
- const char *constraint;
- bool allows_mem, allows_reg, is_inout;
+ enum gimple_code code = gimple_code (stmt);
- for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
+ if (code == GIMPLE_ASM)
+ get_asm_expr_operands (stmt);
+ else
{
- oconstraints[i] = constraint
- = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
- parse_output_constraint (&constraint, i, 0, 0,
- &allows_mem, &allows_reg, &is_inout);
-
- /* This should have been split in gimplify_asm_expr. */
- gcc_assert (!allows_reg || !is_inout);
+ size_t i, start = 0;
- /* Memory operands are addressable. Note that STMT needs the
- address of this operand. */
- if (!allows_reg && allows_mem)
+ if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
{
- tree t = get_base_address (TREE_VALUE (link));
- if (t && DECL_P (t) && s_ann)
- add_to_addressable_set (t, &s_ann->addresses_taken);
+ get_expr_operands (stmt, gimple_op_ptr (stmt, 0), opf_def);
+ start = 1;
}
- get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
- }
-
- for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
- {
- constraint
- = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
- parse_input_constraint (&constraint, 0, 0, noutputs, 0,
- oconstraints, &allows_mem, &allows_reg);
-
- /* Memory operands are addressable. Note that STMT needs the
- address of this operand. */
- if (!allows_reg && allows_mem)
- {
- tree t = get_base_address (TREE_VALUE (link));
- if (t && DECL_P (t) && s_ann)
- add_to_addressable_set (t, &s_ann->addresses_taken);
- }
+ for (i = start; i < gimple_num_ops (stmt); i++)
+ get_expr_operands (stmt, gimple_op_ptr (stmt, i), opf_use);
- get_expr_operands (stmt, &TREE_VALUE (link), 0);
+ /* Add call-clobbered operands, if needed. */
+ if (code == GIMPLE_CALL)
+ maybe_add_call_vops (stmt);
}
-
-
- /* Clobber memory for asm ("" : : : "memory"); */
- for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
- if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
- {
- unsigned i;
- bitmap_iterator bi;
-
- /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
- decided to group them). */
- if (global_var)
- add_stmt_operand (&global_var, s_ann, opf_is_def);
- else
- EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
- {
- tree var = referenced_var (i);
- add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
- }
-
- /* Now clobber all addressables. */
- EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
- {
- tree var = referenced_var (i);
-
- /* Subvars are explicitly represented in this list, so
- we don't need the original to be added to the clobber
- ops, but the original *will* be in this list because
- we keep the addressability of the original
- variable up-to-date so we don't screw up the rest of
- the backend. */
- if (var_can_have_subvars (var)
- && get_subvars_for_var (var) != NULL)
- continue;
-
- add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
- }
-
- break;
- }
}
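+
+/* For example, for the GIMPLE_ASSIGN `a_1 = b_2 + c_3' the loop above
+   parses operand 0 (`a_1') with opf_def and operands 1 and 2 (`b_2',
+   `c_3') with opf_use.  For a GIMPLE_CALL the same split applies to
+   the LHS and the remaining operands (callee and arguments), after
+   which maybe_add_call_vops accounts for memory read or clobbered by
+   the callee.  */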
-/* A subroutine of get_expr_operands to handle INDIRECT_REF,
- ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.
- STMT is the statement being processed, EXPR is the INDIRECT_REF
- that got us here. FLAGS is as in get_expr_operands.
- FULL_REF contains the full pointer dereference expression, if we
- have it, or NULL otherwise.
- OFFSET and SIZE are the location of the access inside the
- dereferenced pointer, if known.
- RECURSE_ON_BASE should be set to true if we want to continue
- calling get_expr_operands on the base pointer, and false if
- something else will do it for us.
-*/
+/* Create an operands cache for STMT. */
static void
-get_indirect_ref_operands (tree stmt, tree expr, int flags,
- tree full_ref,
- HOST_WIDE_INT offset, HOST_WIDE_INT size,
- bool recurse_on_base)
+build_ssa_operands (gimple stmt)
{
- tree *pptr = &TREE_OPERAND (expr, 0);
- tree ptr = *pptr;
- stmt_ann_t s_ann = stmt_ann (stmt);
-
- /* Stores into INDIRECT_REF operands are never killing definitions. */
- flags &= ~opf_kill_def;
-
- if (SSA_VAR_P (ptr))
- {
- struct ptr_info_def *pi = NULL;
-
- /* If PTR has flow-sensitive points-to information, use it. */
- if (TREE_CODE (ptr) == SSA_NAME
- && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
- && pi->name_mem_tag)
- {
- /* PTR has its own memory tag. Use it. */
- add_virtual_operand (pi->name_mem_tag, s_ann, flags,
- full_ref, offset, size, false);
- }
- else
- {
- /* If PTR is not an SSA_NAME or it doesn't have a name
- tag, use its type memory tag. */
- var_ann_t v_ann;
-
- /* If we are emitting debugging dumps, display a warning if
- PTR is an SSA_NAME with no flow-sensitive alias
- information. That means that we may need to compute
- aliasing again. */
- if (dump_file
- && TREE_CODE (ptr) == SSA_NAME
- && pi == NULL)
- {
- fprintf (dump_file,
- "NOTE: no flow-sensitive alias info for ");
- print_generic_expr (dump_file, ptr, dump_flags);
- fprintf (dump_file, " in ");
- print_generic_stmt (dump_file, stmt, dump_flags);
- }
-
- if (TREE_CODE (ptr) == SSA_NAME)
- ptr = SSA_NAME_VAR (ptr);
- v_ann = var_ann (ptr);
-
- if (v_ann->type_mem_tag)
- add_virtual_operand (v_ann->type_mem_tag, s_ann, flags,
- full_ref, offset, size, false);
- }
- }
-
- /* If a constant is used as a pointer, we can't generate a real
- operand for it but we mark the statement volatile to prevent
- optimizations from messing things up. */
- else if (TREE_CODE (ptr) == INTEGER_CST)
- {
- if (s_ann)
- s_ann->has_volatile_ops = true;
- return;
- }
- /* Ok, this isn't even is_gimple_min_invariant. Something's broke. */
- else
- gcc_unreachable ();
+ /* Initially assume that the statement has no volatile operands. */
+ gimple_set_has_volatile_ops (stmt, false);
- /* Add a USE operand for the base pointer. */
- if (recurse_on_base)
- get_expr_operands (stmt, pptr, opf_none);
+ start_ssa_stmt_operands ();
+ parse_ssa_operands (stmt);
+ finalize_ssa_stmt_operands (stmt);
}
-/* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
-
-static void
-get_tmr_operands (tree stmt, tree expr, int flags)
-{
- tree tag = TMR_TAG (expr), ref;
- HOST_WIDE_INT offset, size, maxsize;
- subvar_t svars, sv;
- stmt_ann_t s_ann = stmt_ann (stmt);
-
- /* First record the real operands. */
- get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
- get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);
-
- /* MEM_REFs should never be killing. */
- flags &= ~opf_kill_def;
-
- if (TMR_SYMBOL (expr))
- {
- stmt_ann_t ann = stmt_ann (stmt);
- add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
- }
-
- if (!tag)
- {
- /* Something weird, so ensure that we will be careful. */
- stmt_ann (stmt)->has_volatile_ops = true;
- return;
- }
-
- if (DECL_P (tag))
- {
- get_expr_operands (stmt, &tag, flags);
- return;
- }
-
- ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
- gcc_assert (ref != NULL_TREE);
- svars = get_subvars_for_var (ref);
- for (sv = svars; sv; sv = sv->next)
- {
- bool exact;
- if (overlap_subvar (offset, maxsize, sv->var, &exact))
- {
- int subvar_flags = flags;
- if (!exact || size != maxsize)
- subvar_flags &= ~opf_kill_def;
- add_stmt_operand (&sv->var, s_ann, subvar_flags);
- }
- }
-}
-/* A subroutine of get_expr_operands to handle CALL_EXPR. */
+/* Releases the operands of STMT back to their freelists, and clears
+ the stmt operand lists. */
-static void
-get_call_expr_operands (tree stmt, tree expr)
+void
+free_stmt_operands (gimple stmt)
{
- tree op;
- int call_flags = call_expr_flags (expr);
+ def_optype_p defs = gimple_def_ops (stmt), last_def;
+ use_optype_p uses = gimple_use_ops (stmt), last_use;
- /* If aliases have been computed already, add V_MAY_DEF or V_USE
- operands for all the symbols that have been found to be
- call-clobbered.
-
- Note that if aliases have not been computed, the global effects
- of calls will not be included in the SSA web. This is fine
- because no optimizer should run before aliases have been
- computed. By not bothering with virtual operands for CALL_EXPRs
- we avoid adding superfluous virtual operands, which can be a
- significant compile time sink (See PR 15855). */
- if (aliases_computed_p
- && !bitmap_empty_p (call_clobbered_vars)
- && !(call_flags & ECF_NOVOPS))
+ if (defs)
{
- /* A 'pure' or a 'const' function never call-clobbers anything.
- A 'noreturn' function might, but since we don't return anyway
- there is no point in recording that. */
- if (TREE_SIDE_EFFECTS (expr)
- && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
- add_call_clobber_ops (stmt, get_callee_fndecl (expr));
- else if (!(call_flags & ECF_CONST))
- add_call_read_ops (stmt, get_callee_fndecl (expr));
+ for (last_def = defs; last_def->next; last_def = last_def->next)
+ continue;
+ last_def->next = gimple_ssa_operands (cfun)->free_defs;
+ gimple_ssa_operands (cfun)->free_defs = defs;
+ gimple_set_def_ops (stmt, NULL);
}
- /* Find uses in the called function. */
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
-
- for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
- get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
-
- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
-
-}
-
-/* REF is a tree that contains the entire pointer dereference
- expression, if available, or NULL otherwise. ALIAS is the variable
- we are asking if REF can access. OFFSET and SIZE come from the
- memory access expression that generated this virtual operand.
- FOR_CLOBBER is true is this is adding a virtual operand for a call
- clobber. */
-
-static bool
-access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
- HOST_WIDE_INT size)
-{
- bool offsetgtz = offset > 0;
- unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
- tree base = ref ? get_base_address (ref) : NULL;
-
- /* If ALIAS is an SFT, it can't be touched if the offset
- and size of the access is not overlapping with the SFT offset and
- size. This is only true if we are accessing through a pointer
- to a type that is the same as SFT_PARENT_VAR. Otherwise, we may
- be accessing through a pointer to some substruct of the
- structure, and if we try to prune there, we will have the wrong
- offset, and get the wrong answer.
- i.e., we can't prune without more work if we have something like
- struct gcc_target
- {
- struct asm_out
- {
- const char *byte_op;
- struct asm_int_op
- {
- const char *hi;
- } aligned_op;
- } asm_out;
- } targetm;
-
- foo = &targetm.asm_out.aligned_op;
- return foo->hi;
-
- SFT.1, which represents hi, will have SFT_OFFSET=32 because in
- terms of SFT_PARENT_VAR, that is where it is.
- However, the access through the foo pointer will be at offset 0.
- */
-
- if (size != -1
- && TREE_CODE (alias) == STRUCT_FIELD_TAG
- && base
- && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias))
- && !overlap_subvar (offset, size, alias, NULL))
+ if (uses)
{
-#ifdef ACCESS_DEBUGGING
- fprintf (stderr, "Access to ");
- print_generic_expr (stderr, ref, 0);
- fprintf (stderr, " may not touch ");
- print_generic_expr (stderr, alias, 0);
- fprintf (stderr, " in function %s\n", get_name (current_function_decl));
-#endif
- return false;
- }
-
- /* Without strict aliasing, it is impossible for a component access
- through a pointer to touch a random variable, unless that
- variable *is* a structure or a pointer.
-
-
- IE given p->c, and some random global variable b,
- there is no legal way that p->c could be an access to b.
-
- Without strict aliasing on, we consider it legal to do something
- like:
- struct foos { int l; };
- int foo;
- static struct foos *getfoo(void);
- int main (void)
- {
- struct foos *f = getfoo();
- f->l = 1;
- foo = 2;
- if (f->l == 1)
- abort();
- exit(0);
- }
- static struct foos *getfoo(void)
- { return (struct foos *)&foo; }
-
- (taken from 20000623-1.c)
- */
-
- else if (ref
- && flag_strict_aliasing
- && TREE_CODE (ref) != INDIRECT_REF
- && !MTAG_P (alias)
- && !AGGREGATE_TYPE_P (TREE_TYPE (alias))
- && !TREE_CODE (TREE_TYPE (alias)) == COMPLEX_TYPE
- && !POINTER_TYPE_P (TREE_TYPE (alias)))
- {
-#ifdef ACCESS_DEBUGGING
- fprintf (stderr, "Access to ");
- print_generic_expr (stderr, ref, 0);
- fprintf (stderr, " may not touch ");
- print_generic_expr (stderr, alias, 0);
- fprintf (stderr, " in function %s\n", get_name (current_function_decl));
-#endif
- return false;
+ for (last_use = uses; last_use->next; last_use = last_use->next)
+ delink_imm_use (USE_OP_PTR (last_use));
+ delink_imm_use (USE_OP_PTR (last_use));
+ last_use->next = gimple_ssa_operands (cfun)->free_uses;
+ gimple_ssa_operands (cfun)->free_uses = uses;
+ gimple_set_use_ops (stmt, NULL);
}
- /* If the offset of the access is greater than the size of one of
- the possible aliases, it can't be touching that alias, because it
- would be past the end of the structure. */
-
- else if (ref
- && flag_strict_aliasing
- && TREE_CODE (ref) != INDIRECT_REF
- && !MTAG_P (alias)
- && !POINTER_TYPE_P (TREE_TYPE (alias))
- && offsetgtz
- && DECL_SIZE (alias)
- && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
- && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias)))
+ if (gimple_has_mem_ops (stmt))
{
-#ifdef ACCESS_DEBUGGING
- fprintf (stderr, "Access to ");
- print_generic_expr (stderr, ref, 0);
- fprintf (stderr, " may not touch ");
- print_generic_expr (stderr, alias, 0);
- fprintf (stderr, " in function %s\n", get_name (current_function_decl));
-#endif
- return false;
- }
- return true;
-}
-
-/* Add VAR to the virtual operands array. FLAGS is as in
- get_expr_operands. FULL_REF is a tree that contains the entire
- pointer dereference expression, if available, or NULL otherwise.
- OFFSET and SIZE come from the memory access expression that
- generated this virtual operand. FOR_CLOBBER is true is this is
- adding a virtual operand for a call clobber. */
-
-static void
-add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
- tree full_ref, HOST_WIDE_INT offset,
- HOST_WIDE_INT size, bool for_clobber)
-{
- VEC(tree,gc) *aliases;
- tree sym;
- var_ann_t v_ann;
-
- sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
- v_ann = var_ann (sym);
-
- /* Mark statements with volatile operands. Optimizers should back
- off from statements having volatile operands. */
- if (TREE_THIS_VOLATILE (sym) && s_ann)
- s_ann->has_volatile_ops = true;
-
- /* If the variable cannot be modified and this is a V_MAY_DEF change
- it into a VUSE. This happens when read-only variables are marked
- call-clobbered and/or aliased to writable variables. So we only
- check that this only happens on non-specific stores.
-
- Note that if this is a specific store, i.e. associated with a
- modify_expr, then we can't suppress the V_DEF, lest we run into
- validation problems.
-
- This can happen when programs cast away const, leaving us with a
- store to read-only memory. If the statement is actually executed
- at runtime, then the program is ill formed. If the statement is
- not executed then all is well. At the very least, we cannot ICE. */
- if ((flags & opf_non_specific) && unmodifiable_var_p (var))
- flags &= ~(opf_is_def | opf_kill_def);
-
-
- /* The variable is not a GIMPLE register. Add it (or its aliases) to
- virtual operands, unless the caller has specifically requested
- not to add virtual operands (used when adding operands inside an
- ADDR_EXPR expression). */
- if (flags & opf_no_vops)
- return;
-
- aliases = v_ann->may_aliases;
- if (aliases == NULL)
- {
- /* The variable is not aliased or it is an alias tag. */
- if (flags & opf_is_def)
- {
- if (flags & opf_kill_def)
- {
- /* Only regular variables or struct fields may get a
- V_MUST_DEF operand. */
- gcc_assert (!MTAG_P (var)
- || TREE_CODE (var) == STRUCT_FIELD_TAG);
- /* V_MUST_DEF for non-aliased, non-GIMPLE register
- variable definitions. */
- append_v_must_def (var);
- }
- else
- {
- /* Add a V_MAY_DEF for call-clobbered variables and
- memory tags. */
- append_v_may_def (var);
- }
- }
- else
- append_vuse (var);
- }
- else
- {
- unsigned i;
- tree al;
-
- /* The variable is aliased. Add its aliases to the virtual
- operands. */
- gcc_assert (VEC_length (tree, aliases) != 0);
-
- if (flags & opf_is_def)
- {
-
- bool none_added = true;
-
- for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
- {
- if (!access_can_touch_variable (full_ref, al, offset, size))
- continue;
-
- none_added = false;
- append_v_may_def (al);
- }
-
- /* If the variable is also an alias tag, add a virtual
- operand for it, otherwise we will miss representing
- references to the members of the variable's alias set.
- This fixes the bug in gcc.c-torture/execute/20020503-1.c.
-
- It is also necessary to add bare defs on clobbers for
- TMT's, so that bare TMT uses caused by pruning all the
- aliases will link up properly with calls. */
- if (v_ann->is_alias_tag || none_added
- || (TREE_CODE (var) == TYPE_MEMORY_TAG && for_clobber))
- {
- /* We should never end up with adding no aliases of an
- NMT, as that would imply we got the set wrong. */
- gcc_assert (!(none_added && TREE_CODE (var) == NAME_MEMORY_TAG));
-
- append_v_may_def (var);
- }
- }
- else
- {
- bool none_added = true;
- for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
- {
- if (!access_can_touch_variable (full_ref, al, offset, size))
- continue;
- none_added = false;
- append_vuse (al);
- }
-
- /* Similarly, append a virtual uses for VAR itself, when
- it is an alias tag. */
- if (v_ann->is_alias_tag || none_added)
- {
- gcc_assert (!(none_added && TREE_CODE (var) == NAME_MEMORY_TAG));
-
- append_vuse (var);
- }
- }
+ gimple_set_vuse (stmt, NULL_TREE);
+ gimple_set_vdef (stmt, NULL_TREE);
}
}
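+
+/* A worked example of the splice above: if STMT's def list is
+   d1 -> d2 and the per-function freelist is f1 -> f2, the loop stops
+   at the tail d2 and the splice threads d1 -> d2 -> f1 -> f2, which
+   becomes the new free_defs.  The use list is recycled the same way,
+   except that every use is first delinked from the immediate-use
+   chain of the SSA name it referenced.  */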
-/* Add *VAR_P to the appropriate operand array for INFO. FLAGS is as in
- get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
- the statement's real operands, otherwise it is added to virtual
- operands. */
-static void
-add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
-{
- bool is_real_op;
- tree var, sym;
- var_ann_t v_ann;
-
- var = *var_p;
- gcc_assert (SSA_VAR_P (var));
-
- is_real_op = is_gimple_reg (var);
- /* If this is a real operand, the operand is either ssa name or decl.
- Virtual operands may only be decls. */
- gcc_assert (is_real_op || DECL_P (var));
-
- sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
- v_ann = var_ann (sym);
-
- /* Mark statements with volatile operands. Optimizers should back
- off from statements having volatile operands. */
- if (TREE_THIS_VOLATILE (sym) && s_ann)
- s_ann->has_volatile_ops = true;
-
- if (is_real_op)
- {
- /* The variable is a GIMPLE register. Add it to real operands. */
- if (flags & opf_is_def)
- append_def (var_p);
- else
- append_use (var_p);
- }
- else
- add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false);
-}
-
-/* Add the base address of REF to the set *ADDRESSES_TAKEN. If
- *ADDRESSES_TAKEN is NULL, a new set is created. REF may be
- a single variable whose address has been taken or any other valid
- GIMPLE memory reference (structure reference, array, etc). If the
- base address of REF is a decl that has sub-variables, also add all
- of its sub-variables. */
+/* Get the operands of statement STMT. */
void
-add_to_addressable_set (tree ref, bitmap *addresses_taken)
+update_stmt_operands (gimple stmt)
{
- tree var;
- subvar_t svars;
-
- gcc_assert (addresses_taken);
-
- /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
- as the only thing we take the address of. If VAR is a structure,
- taking the address of a field means that the whole structure may
- be referenced using pointer arithmetic. See PR 21407 and the
- ensuing mailing list discussion. */
- var = get_base_address (ref);
- if (var && SSA_VAR_P (var))
- {
- if (*addresses_taken == NULL)
- *addresses_taken = BITMAP_GGC_ALLOC ();
-
- if (var_can_have_subvars (var)
- && (svars = get_subvars_for_var (var)))
- {
- subvar_t sv;
- for (sv = svars; sv; sv = sv->next)
- {
- bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
- TREE_ADDRESSABLE (sv->var) = 1;
- }
- }
- else
- {
- bitmap_set_bit (*addresses_taken, DECL_UID (var));
- TREE_ADDRESSABLE (var) = 1;
- }
- }
-}
-
-/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
- clobbered variables in the function. */
-
-static void
-add_call_clobber_ops (tree stmt, tree callee)
-{
- unsigned u;
- bitmap_iterator bi;
- stmt_ann_t s_ann = stmt_ann (stmt);
- bitmap not_read_b, not_written_b;
-
- /* Functions that are not const, pure or never return may clobber
- call-clobbered variables. */
- if (s_ann)
- s_ann->makes_clobbering_call = true;
-
- /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
- for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
- if (global_var)
- {
- add_stmt_operand (&global_var, s_ann, opf_is_def);
- return;
- }
-
- /* Get info for local and module level statics. There is a bit
- set for each static if the call being processed does not read
- or write that variable. */
-
- not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
- not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
- /* Add a V_MAY_DEF operand for every call clobbered variable. */
- EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
- {
- tree var = referenced_var_lookup (u);
- unsigned int escape_mask = var_ann (var)->escape_mask;
- tree real_var = var;
- bool not_read;
- bool not_written;
-
- /* Not read and not written are computed on regular vars, not
- subvars, so look at the parent var if this is an SFT. */
-
- if (TREE_CODE (var) == STRUCT_FIELD_TAG)
- real_var = SFT_PARENT_VAR (var);
+ /* If update_stmt_operands is called before SSA is initialized, do
+ nothing. */
+ if (!ssa_operands_active ())
+ return;
- not_read = not_read_b ? bitmap_bit_p (not_read_b,
- DECL_UID (real_var)) : false;
- not_written = not_written_b ? bitmap_bit_p (not_written_b,
- DECL_UID (real_var)) : false;
- gcc_assert (!unmodifiable_var_p (var));
-
- clobber_stats.clobbered_vars++;
+ timevar_push (TV_TREE_OPS);
- /* See if this variable is really clobbered by this function. */
+ gcc_assert (gimple_modified_p (stmt));
+ build_ssa_operands (stmt);
+ gimple_set_modified (stmt, false);
- /* Trivial case: Things escaping only to pure/const are not
- clobbered by non-pure-const, and only read by pure/const. */
- if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
- {
- tree call = get_call_expr_in (stmt);
- if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
- {
- add_stmt_operand (&var, s_ann, opf_none);
- clobber_stats.unescapable_clobbers_avoided++;
- continue;
- }
- else
- {
- clobber_stats.unescapable_clobbers_avoided++;
- continue;
- }
- }
-
- if (not_written)
- {
- clobber_stats.static_write_clobbers_avoided++;
- if (!not_read)
- add_stmt_operand (&var, s_ann, opf_none);
- else
- clobber_stats.static_read_clobbers_avoided++;
- }
- else
- add_virtual_operand (var, s_ann, opf_is_def,
- NULL, 0, -1, true);
- }
-
+ timevar_pop (TV_TREE_OPS);
}
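+
+/* A minimal usage sketch, assuming the update_stmt wrapper from
+   tree-flow-inline.h: passes normally do not call this function
+   directly; they edit the statement and let update_stmt mark it
+   modified and trigger the re-scan (new_rhs being whatever
+   replacement tree the pass computed):
+
+     gimple_assign_set_rhs1 (stmt, new_rhs);
+     update_stmt (stmt);  */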
-/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
- function. */
+/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is made
+ to test the validity of the swap operation. */
-static void
-add_call_read_ops (tree stmt, tree callee)
+void
+swap_tree_operands (gimple stmt, tree *exp0, tree *exp1)
{
- unsigned u;
- bitmap_iterator bi;
- stmt_ann_t s_ann = stmt_ann (stmt);
- bitmap not_read_b;
-
- /* if the function is not pure, it may reference memory. Add
- a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
- for the heuristic used to decide whether to create .GLOBAL_VAR. */
- if (global_var)
- {
- add_stmt_operand (&global_var, s_ann, opf_none);
- return;
- }
-
- not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
+ tree op0, op1;
+ op0 = *exp0;
+ op1 = *exp1;
- /* Add a VUSE for each call-clobbered variable. */
- EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
+ /* If the operand cache is active, attempt to preserve the relative
+ positions of these two operands in their respective immediate use
+ lists. */
+ if (ssa_operands_active () && op0 != op1)
{
- tree var = referenced_var (u);
- tree real_var = var;
- bool not_read;
-
- clobber_stats.readonly_clobbers++;
+ use_optype_p use0, use1, ptr;
+ use0 = use1 = NULL;
- /* Not read and not written are computed on regular vars, not
- subvars, so look at the parent var if this is an SFT. */
+ /* Find the 2 operands in the cache, if they are there. */
+ for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
+ if (USE_OP_PTR (ptr)->use == exp0)
+ {
+ use0 = ptr;
+ break;
+ }
- if (TREE_CODE (var) == STRUCT_FIELD_TAG)
- real_var = SFT_PARENT_VAR (var);
+ for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
+ if (USE_OP_PTR (ptr)->use == exp1)
+ {
+ use1 = ptr;
+ break;
+ }
- not_read = not_read_b ? bitmap_bit_p (not_read_b,
- DECL_UID (real_var)) : false;
-
- if (not_read)
- {
- clobber_stats.static_readonly_clobbers_avoided++;
- continue;
+ /* If either use lacks an operand entry, there isn't much we can do
+ at this point. Presumably we don't need to worry about it. */
+ if (use0 && use1)
+ {
+ tree *tmp = USE_OP_PTR (use1)->use;
+ USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
+ USE_OP_PTR (use0)->use = tmp;
}
-
- add_stmt_operand (&var, s_ann, opf_none | opf_non_specific);
}
+
+ /* Now swap the data. */
+ *exp0 = op1;
+ *exp1 = op0;
}
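+
+/* A hedged usage sketch: a pass canonicalizing a commutative
+   GIMPLE_ASSIGN could swap its two RHS operands in place, assuming
+   the gimple_assign_rhs1_ptr/gimple_assign_rhs2_ptr accessors from
+   gimple.h:
+
+     swap_tree_operands (stmt,
+                         gimple_assign_rhs1_ptr (stmt),
+                         gimple_assign_rhs2_ptr (stmt));  */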
/* Scan the immediate_use list for VAR making sure it's linked properly.
- return RTUE iof there is a problem. */
+ Return TRUE if there is a problem and emit an error message to F. */
bool
verify_imm_links (FILE *f, tree var)
@@ -2136,7 +1223,9 @@ verify_imm_links (FILE *f, tree var)
prev = ptr;
ptr = ptr->next;
- /* Avoid infinite loops. 50,000,000 uses probably indicates a problem. */
+
+ /* Avoid infinite loops. 50,000,000 uses probably indicates a
+ problem. */
if (count++ > 50000000)
goto error;
}
@@ -2159,10 +1248,10 @@ verify_imm_links (FILE *f, tree var)
return false;
error:
- if (ptr->stmt && stmt_modified_p (ptr->stmt))
+ if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt))
{
- fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
- print_generic_stmt (f, ptr->stmt, TDF_SLIM);
+ fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt);
+ print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
}
fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
(void *)ptr->use);
@@ -2194,14 +1283,18 @@ dump_immediate_uses_for (FILE *file, tree var)
FOR_EACH_IMM_USE_FAST (use_p, iter, var)
{
- if (!is_gimple_reg (USE_FROM_PTR (use_p)))
- print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
+ if (use_p->loc.stmt == NULL && use_p->use == NULL)
+ fprintf (file, "***end of stmt iterator marker***\n");
else
- print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
+ if (!is_gimple_reg (USE_FROM_PTR (use_p)))
+ print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS);
+ else
+ print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM);
}
fprintf(file, "\n");
}
+
/* Dump all the immediate uses to FILE. */
void
@@ -2229,6 +1322,7 @@ debug_immediate_uses (void)
dump_immediate_uses (stderr);
}
+
/* Dump def-use edges on stderr. */
void
@@ -2236,4 +1330,85 @@ debug_immediate_uses_for (tree var)
{
dump_immediate_uses_for (stderr, var);
}
-#include "gt-tree-ssa-operands.h"
+
+
+/* Push *STMT_P on the SCB_STACK.  This function is deprecated; do not
+ introduce new uses of it. */
+
+void
+push_stmt_changes (gimple *stmt_p)
+{
+ gimple stmt = *stmt_p;
+
+ /* It makes no sense to keep track of PHI nodes. */
+ if (gimple_code (stmt) == GIMPLE_PHI)
+ return;
+
+ VEC_safe_push (gimple_p, heap, scb_stack, stmt_p);
+}
+
+/* Pop the top stmt from SCB_STACK and act on the differences between
+ what was recorded by push_stmt_changes and the current state of
+ the statement.  This function is deprecated; do not introduce
+ new uses of it. */
+
+void
+pop_stmt_changes (gimple *stmt_p)
+{
+ gimple *stmt2_p, stmt = *stmt_p;
+
+ /* It makes no sense to keep track of PHI nodes. */
+ if (gimple_code (stmt) == GIMPLE_PHI)
+ return;
+
+ stmt2_p = VEC_pop (gimple_p, scb_stack);
+ gcc_assert (stmt_p == stmt2_p);
+
+ /* Force an operand re-scan on the statement and mark any newly
+ exposed variables. This will also mark the virtual operand
+ for renaming if necessary. */
+ update_stmt (stmt);
+}
+
+/* Discard the topmost stmt from SCB_STACK.  This is useful
+ when the caller realizes that it did not actually modify the
+ statement.  It avoids the expensive operand re-scan.
+ This function is deprecated; do not introduce new uses of it. */
+
+void
+discard_stmt_changes (gimple *stmt_p)
+{
+ gimple *stmt2_p, stmt = *stmt_p;
+
+ /* It makes no sense to keep track of PHI nodes. */
+ if (gimple_code (stmt) == GIMPLE_PHI)
+ return;
+
+ stmt2_p = VEC_pop (gimple_p, scb_stack);
+ gcc_assert (stmt_p == stmt2_p);
+}
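+
+/* The (deprecated) bracketing pattern the three functions above
+   support, sketched with a hypothetical in-place rewrite `transform'
+   that returns true only when it changed STMT:
+
+     push_stmt_changes (&stmt);
+     if (transform (stmt))
+       pop_stmt_changes (&stmt);
+     else
+       discard_stmt_changes (&stmt);
+
+   pop_stmt_changes triggers the operand re-scan via update_stmt,
+   while discard_stmt_changes skips that cost when nothing was
+   modified.  */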
+
+/* Unlink STMT's virtual definition from the IL by propagating its
+ VUSE into all uses of its VDEF. */
+
+void
+unlink_stmt_vdef (gimple stmt)
+{
+ use_operand_p use_p;
+ imm_use_iterator iter;
+ gimple use_stmt;
+ tree vdef = gimple_vdef (stmt);
+
+ if (!vdef
+ || TREE_CODE (vdef) != SSA_NAME)
+ return;
+
+ FOR_EACH_IMM_USE_STMT (use_stmt, iter, gimple_vdef (stmt))
+ {
+ FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
+ SET_USE (use_p, gimple_vuse (stmt));
+ }
+
+ if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vdef (stmt)))
+ SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vuse (stmt)) = 1;
+}
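+
+/* A hedged sketch of the typical caller: when deleting a dead store
+   at gimple_stmt_iterator GSI, the VDEF is unlinked first so that
+   later VUSEs see the store's own incoming virtual operand, then the
+   statement and its SSA names are released (gsi_remove and
+   release_defs assumed from the iterator and SSA-name APIs):
+
+     unlink_stmt_vdef (stmt);
+     gsi_remove (&gsi, true);
+     release_defs (stmt);  */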
+