/* SSA operands management for trees.
- Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
+ Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
This file is part of GCC.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to
-the Free Software Foundation, 59 Temple Place - Suite 330,
-Boston, MA 02111-1307, USA. */
+the Free Software Foundation, 51 Franklin Street, Fifth Floor,
+Boston, MA 02110-1301, USA. */
#include "config.h"
#include "system.h"
#include "flags.h"
#include "function.h"
#include "diagnostic.h"
-#include "errors.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
-
+#include "toplev.h"
#include "langhooks.h"
+#include "ipa-reference.h"
/* This file contains the code required to manage the operands cache of the
SSA optimizer. For every stmt, we maintain an operand cache in the stmt
annotation. The routines in this file are concerned with creating this operand cache
from a stmt tree.
- get_stmt_operands() in the primary entry point.
-
The operand tree is then parsed by the various get_* routines which look
through the stmt tree for the occurrence of operands which may be of
interest, and calls are made to the append_* routines whenever one is
found. Once the stmt tree is completely parsed, the finalization routines
are called on each of the 5 operand vectors which have been built up.
If the stmt had a previous operand cache, the finalization routines
- attempt to match up the new operands with the old ones. If its a perfect
+ attempt to match up the new operands with the old ones. If it's a perfect
match, the old vector is simply reused. If it isn't a perfect match, then
a new vector is created and the new operands are placed there. For
virtual operands, if the previous cache had an SSA_NAME version of a
variable, and that variable occurs in the same operands cache, then the
new cache vector will also get the SSA_NAME version. I.e., if a stmt had
a VUSE of 'a_5', and 'a' occurs in the new operand
vector for VUSE, then the new vector will also be modified such that
- it contains 'a_5' rather than 'a'.
-
-*/
-
+ it contains 'a_5' rather than 'a'. */
-/* Flags to describe operand properties in get_stmt_operands and helpers. */
+/* Flags to describe operand properties in helpers. */
/* By default, operands are loaded. */
#define opf_none 0
/* Operand is the target of an assignment expression or a
- call-clobbered variable */
+ call-clobbered variable. */
#define opf_is_def (1 << 0)
/* Operand is the target of an assignment expression. */
#define opf_kill_def (1 << 1)
/* No virtual operands should be created in the expression. This is used
when traversing ADDR_EXPR nodes which have different semantics than
other expressions. Inside an ADDR_EXPR node, the only operands that we
need to consider are indices into arrays. For instance, &a.b[i] should
generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
VUSE for 'b'. */
#define opf_no_vops (1 << 2)
+/* Operand is a "non-specific" kill for call-clobbers and such. This
+ is used to distinguish "reset the world" events from explicit
+ GIMPLE_MODIFY_STMTs. */
+#define opf_non_specific (1 << 3)
+
/* Array for building all the def operands. */
-static GTY (()) varray_type build_defs;
+static VEC(tree,heap) *build_defs;
/* Array for building all the use operands. */
-static GTY (()) varray_type build_uses;
+static VEC(tree,heap) *build_uses;
-/* Array for building all the v_may_def operands. */
-static GTY (()) varray_type build_v_may_defs;
+/* Array for building all the V_MAY_DEF operands. */
+static VEC(tree,heap) *build_v_may_defs;
-/* Array for building all the vuse operands. */
-static GTY (()) varray_type build_vuses;
+/* Array for building all the VUSE operands. */
+static VEC(tree,heap) *build_vuses;
-/* Array for building all the v_must_def operands. */
-static GTY (()) varray_type build_v_must_defs;
+/* Array for building all the V_MUST_DEF operands. */
+static VEC(tree,heap) *build_v_must_defs;
-/* True if the operands for call clobbered vars are cached and valid. */
-bool ssa_call_clobbered_cache_valid;
-bool ssa_ro_call_cache_valid;
+static void get_expr_operands (tree, tree *, int);
-/* These arrays are the cached operand vectors for call clobbered calls. */
-static GTY (()) varray_type clobbered_v_may_defs;
-static GTY (()) varray_type clobbered_vuses;
-static GTY (()) varray_type ro_call_vuses;
-static bool clobbered_aliased_loads;
-static bool clobbered_aliased_stores;
-static bool ro_call_aliased_loads;
+/* Number of functions with initialized ssa_operands. */
+static int n_initialized = 0;
+
+/* Allocates operand OP of given TYPE from the appropriate free list,
+ or from newly allocated memory if the free list is empty. */
+
+#define ALLOC_OPTYPE(OP, TYPE) \
+ do \
+ { \
+ TYPE##_optype_p ret \
+ = gimple_ssa_operands (cfun)->free_##TYPE##s; \
+ if (ret) \
+ gimple_ssa_operands (cfun)->free_##TYPE##s \
+ = ret->next; \
+ else \
+ ret = ssa_operand_alloc (sizeof (*ret)); \
+ (OP) = ret; \
+ } while (0)
+
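/* For illustration, the free-list recycling pattern that ALLOC_OPTYPE
   implements, reduced to a minimal stand-alone sketch: freed nodes are
   chained through their next pointer and recycled before any fresh
   memory is requested.  The type and helper names below are
   illustrative stand-ins, not GCC code.  */

#include <stdlib.h>

struct node { struct node *next; };
static struct node *free_nodes;        /* Head of the free list.  */

static struct node *
node_alloc (void)
{
  struct node *ret = free_nodes;
  if (ret)
    free_nodes = ret->next;            /* Pop a recycled node.  */
  else
    ret = malloc (sizeof (*ret));      /* Fall back to fresh memory.  */
  return ret;
}

static void
node_free (struct node *n)
{
  n->next = free_nodes;                /* Push onto the free list.  */
  free_nodes = n;
}
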
+/* Return the DECL_UID of the base variable of T. */
+
+static inline unsigned
+get_name_decl (tree t)
+{
+ if (TREE_CODE (t) != SSA_NAME)
+ return DECL_UID (t);
+ else
+ return DECL_UID (SSA_NAME_VAR (t));
+}
-#ifdef ENABLE_CHECKING
-/* Used to make sure operand construction is working on the proper stmt. */
-tree check_build_stmt;
-#endif
-def_operand_p NULL_DEF_OPERAND_P = { NULL };
-use_operand_p NULL_USE_OPERAND_P = { NULL };
+/* Comparison function for qsort used in operand_build_sort_virtual. */
-static void note_addressable (tree, stmt_ann_t);
-static void get_expr_operands (tree, tree *, int);
-static void get_asm_expr_operands (tree);
-static void get_indirect_ref_operands (tree, tree, int);
-static void get_call_expr_operands (tree, tree);
-static inline void append_def (tree *);
-static inline void append_use (tree *);
-static void append_v_may_def (tree);
-static void append_v_must_def (tree);
-static void add_call_clobber_ops (tree);
-static void add_call_read_ops (tree);
-static void add_stmt_operand (tree *, stmt_ann_t, int);
-
-/* Return a vector of contiguous memory for NUM def operands. */
-
-static inline def_optype
-allocate_def_optype (unsigned num)
+static int
+operand_build_cmp (const void *p, const void *q)
{
- def_optype def_ops;
- unsigned size;
- size = sizeof (struct def_optype_d) + sizeof (tree *) * (num - 1);
- def_ops = ggc_alloc (size);
- def_ops->num_defs = num;
- return def_ops;
+ tree e1 = *((const tree *)p);
+ tree e2 = *((const tree *)q);
+ unsigned int u1, u2;
+
+ u1 = get_name_decl (e1);
+ u2 = get_name_decl (e2);
+
+ /* We want to sort in ascending order. They can never be equal. */
+#ifdef ENABLE_CHECKING
+ gcc_assert (u1 != u2);
+#endif
+ return (u1 > u2 ? 1 : -1);
}
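
/* For illustration, a stand-alone use of a comparator of this shape
   with qsort.  Because two distinct virtual operands never share a base
   DECL_UID, operand_build_cmp can assert away the equal case; the
   generic sketch below keeps it.  Illustrative code, not GCC's.  */

#include <stdio.h>
#include <stdlib.h>

static int
cmp_unsigned (const void *p, const void *q)
{
  unsigned u1 = *(const unsigned *) p;
  unsigned u2 = *(const unsigned *) q;
  return u1 > u2 ? 1 : (u1 < u2 ? -1 : 0);
}

int
main (void)
{
  unsigned uids[] = { 42, 7, 19 };
  qsort (uids, 3, sizeof (unsigned), cmp_unsigned);
  printf ("%u %u %u\n", uids[0], uids[1], uids[2]);   /* Prints: 7 19 42 */
  return 0;
}
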
-/* Return a vector of contiguous memory for NUM use operands. */
+/* Sort the virtual operands in LIST from lowest DECL_UID to highest. */
-static inline use_optype
-allocate_use_optype (unsigned num)
+static inline void
+operand_build_sort_virtual (VEC(tree,heap) *list)
{
- use_optype use_ops;
- unsigned size;
- size = sizeof (struct use_optype_d) + sizeof (tree *) * (num - 1);
- use_ops = ggc_alloc (size);
- use_ops->num_uses = num;
- return use_ops;
-}
+ int num = VEC_length (tree, list);
+ if (num < 2)
+ return;
-/* Return a vector of contiguous memory for NUM v_may_def operands. */
+ if (num == 2)
+ {
+ if (get_name_decl (VEC_index (tree, list, 0))
+ > get_name_decl (VEC_index (tree, list, 1)))
+ {
+ /* Swap elements if in the wrong order. */
+ tree tmp = VEC_index (tree, list, 0);
+ VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
+ VEC_replace (tree, list, 1, tmp);
+ }
+ return;
+ }
-static inline v_may_def_optype
-allocate_v_may_def_optype (unsigned num)
-{
- v_may_def_optype v_may_def_ops;
- unsigned size;
- size = sizeof (struct v_may_def_optype_d)
- + sizeof (v_def_use_operand_type_t) * (num - 1);
- v_may_def_ops = ggc_alloc (size);
- v_may_def_ops->num_v_may_defs = num;
- return v_may_def_ops;
+ /* There are 3 or more elements, call qsort. */
+ qsort (VEC_address (tree, list),
+ VEC_length (tree, list),
+ sizeof (tree),
+ operand_build_cmp);
}
-/* Return a vector of contiguous memory for NUM v_use operands. */
+/* Return true if the SSA operands cache is active. */
-static inline vuse_optype
-allocate_vuse_optype (unsigned num)
+bool
+ssa_operands_active (void)
{
- vuse_optype vuse_ops;
- unsigned size;
- size = sizeof (struct vuse_optype_d) + sizeof (tree) * (num - 1);
- vuse_ops = ggc_alloc (size);
- vuse_ops->num_vuses = num;
- return vuse_ops;
+ return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
}
-/* Return a vector of contiguous memory for NUM v_must_def operands. */
+/* Structure storing statistics on how many call clobbers we have, and
+ how many were avoided. */
-static inline v_must_def_optype
-allocate_v_must_def_optype (unsigned num)
+static struct
{
- v_must_def_optype v_must_def_ops;
- unsigned size;
- size = sizeof (struct v_must_def_optype_d) + sizeof (v_def_use_operand_type_t) * (num - 1);
- v_must_def_ops = ggc_alloc (size);
- v_must_def_ops->num_v_must_defs = num;
- return v_must_def_ops;
-}
+ /* Number of call-clobbered ops we attempt to add to calls in
+ add_call_clobber_ops. */
+ unsigned int clobbered_vars;
+
+ /* Number of write-clobbers (V_MAY_DEFs) avoided by using
+ not_written information. */
+ unsigned int static_write_clobbers_avoided;
+ /* Number of reads (VUSEs) avoided by using not_read information. */
+ unsigned int static_read_clobbers_avoided;
+
+ /* Number of write-clobbers avoided because the variable can't escape to
+ this call. */
+ unsigned int unescapable_clobbers_avoided;
-/* Free memory for USES. */
+ /* Number of read-only uses we attempt to add to calls in
+ add_call_read_ops. */
+ unsigned int readonly_clobbers;
-static inline void
-free_uses (use_optype *uses)
+ /* Number of read-only uses we avoid using not_read information. */
+ unsigned int static_readonly_clobbers_avoided;
+} clobber_stats;
+
+
+/* Initialize the operand cache routines. */
+
+void
+init_ssa_operands (void)
{
- if (*uses)
+ if (!n_initialized++)
{
- ggc_free (*uses);
- *uses = NULL;
+ build_defs = VEC_alloc (tree, heap, 5);
+ build_uses = VEC_alloc (tree, heap, 10);
+ build_vuses = VEC_alloc (tree, heap, 25);
+ build_v_may_defs = VEC_alloc (tree, heap, 25);
+ build_v_must_defs = VEC_alloc (tree, heap, 25);
}
+
+ gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
+ gimple_ssa_operands (cfun)->operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
+ gimple_ssa_operands (cfun)->ops_active = true;
+ memset (&clobber_stats, 0, sizeof (clobber_stats));
}
-/* Free memory for DEFS. */
+/* Dispose of anything required by the operand routines. */
-static inline void
-free_defs (def_optype *defs)
+void
+fini_ssa_operands (void)
{
- if (*defs)
+ struct ssa_operand_memory_d *ptr;
+ if (!--n_initialized)
{
- ggc_free (*defs);
- *defs = NULL;
+ VEC_free (tree, heap, build_defs);
+ VEC_free (tree, heap, build_uses);
+ VEC_free (tree, heap, build_v_must_defs);
+ VEC_free (tree, heap, build_v_may_defs);
+ VEC_free (tree, heap, build_vuses);
+ }
+ gimple_ssa_operands (cfun)->free_defs = NULL;
+ gimple_ssa_operands (cfun)->free_uses = NULL;
+ gimple_ssa_operands (cfun)->free_vuses = NULL;
+ gimple_ssa_operands (cfun)->free_maydefs = NULL;
+ gimple_ssa_operands (cfun)->free_mustdefs = NULL;
+ while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
+ {
+ gimple_ssa_operands (cfun)->operand_memory
+ = gimple_ssa_operands (cfun)->operand_memory->next;
+ ggc_free (ptr);
}
-}
+ gimple_ssa_operands (cfun)->ops_active = false;
+
+ if (dump_file && (dump_flags & TDF_STATS))
+ {
+ fprintf (dump_file, "Original clobbered vars:%d\n",
+ clobber_stats.clobbered_vars);
+ fprintf (dump_file, "Static write clobbers avoided:%d\n",
+ clobber_stats.static_write_clobbers_avoided);
+ fprintf (dump_file, "Static read clobbers avoided:%d\n",
+ clobber_stats.static_read_clobbers_avoided);
+ fprintf (dump_file, "Unescapable clobbers avoided:%d\n",
+ clobber_stats.unescapable_clobbers_avoided);
+ fprintf (dump_file, "Original read-only clobbers:%d\n",
+ clobber_stats.readonly_clobbers);
+ fprintf (dump_file, "Static read-only clobbers avoided:%d\n",
+ clobber_stats.static_readonly_clobbers_avoided);
+ }
+}
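
/* For illustration, the n_initialized counting used by init/fini above,
   as a stand-alone sketch: shared state is created by the first
   initializer and destroyed by the last finalizer, so several functions
   can share the build vectors.  Names below are illustrative.  */

#include <stdlib.h>

static int n_init;
static int *shared_buf;

static void
my_init (void)
{
  if (!n_init++)                 /* First client: build shared state.  */
    shared_buf = malloc (25 * sizeof (int));
}

static void
my_fini (void)
{
  if (!--n_init)                 /* Last client: tear it down.  */
    {
      free (shared_buf);
      shared_buf = NULL;
    }
}
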
-/* Free memory for VUSES. */
-static inline void
-free_vuses (vuse_optype *vuses)
+/* Return a pointer to SIZE bytes of operand memory, carved from large
+ internal chunks. */
+
+static inline void *
+ssa_operand_alloc (unsigned size)
{
- if (*vuses)
+ char *ptr;
+ if (gimple_ssa_operands (cfun)->operand_memory_index + size
+ >= SSA_OPERAND_MEMORY_SIZE)
{
- ggc_free (*vuses);
- *vuses = NULL;
+ struct ssa_operand_memory_d *ptr;
+ ptr = GGC_NEW (struct ssa_operand_memory_d);
+ ptr->next = gimple_ssa_operands (cfun)->operand_memory;
+ gimple_ssa_operands (cfun)->operand_memory = ptr;
+ gimple_ssa_operands (cfun)->operand_memory_index = 0;
}
+ ptr = &(gimple_ssa_operands (cfun)->operand_memory
+ ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
+ gimple_ssa_operands (cfun)->operand_memory_index += size;
+ return ptr;
}
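
/* For illustration, the chunked bump allocation of ssa_operand_alloc as
   a stand-alone sketch: objects are carved out of a large chunk, a new
   chunk is pushed when the current one is exhausted, and chaining the
   chunks lets everything be freed at once.  Sizes and names are
   illustrative; like the real allocator with SSA_OPERAND_MEMORY_SIZE,
   this assumes size is much smaller than CHUNK_SIZE.  */

#include <stdlib.h>

#define CHUNK_SIZE 2048

struct chunk
{
  struct chunk *next;
  char mem[CHUNK_SIZE];
};

static struct chunk *chunks;
static unsigned chunk_index = CHUNK_SIZE;   /* Force a chunk on first use.  */

static void *
arena_alloc (unsigned size)
{
  char *ptr;
  if (chunk_index + size >= CHUNK_SIZE)
    {
      struct chunk *c = malloc (sizeof (struct chunk));
      c->next = chunks;                     /* Chain the new chunk.  */
      chunks = c;
      chunk_index = 0;
    }
  ptr = &chunks->mem[chunk_index];          /* Bump the index.  */
  chunk_index += size;
  return ptr;
}

static void
arena_free_all (void)
{
  while (chunks)
    {
      struct chunk *c = chunks;
      chunks = chunks->next;
      free (c);
    }
  chunk_index = CHUNK_SIZE;
}
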
-/* Free memory for V_MAY_DEFS. */
+
+/* This routine makes sure that PTR is in an immediate use list, and makes
+ sure the stmt pointer is set to the current stmt. */
static inline void
-free_v_may_defs (v_may_def_optype *v_may_defs)
+set_virtual_use_link (use_operand_p ptr, tree stmt)
{
- if (*v_may_defs)
- {
- ggc_free (*v_may_defs);
- *v_may_defs = NULL;
- }
+ /* fold_stmt may have changed the stmt pointers. */
+ if (ptr->stmt != stmt)
+ ptr->stmt = stmt;
+
+ /* If this use isn't in a list, add it to the correct list. */
+ if (!ptr->prev)
+ link_imm_use (ptr, *(ptr->use));
}
+/* Appends ELT after TO, and moves the TO pointer to ELT. */
+
+#define APPEND_OP_AFTER(ELT, TO) \
+ do \
+ { \
+ (TO)->next = (ELT); \
+ (TO) = (ELT); \
+ } while (0)
+
+/* Appends head of list FROM after TO, and moves both pointers
+ to their successors. */
+
+#define MOVE_HEAD_AFTER(FROM, TO) \
+ do \
+ { \
+ APPEND_OP_AFTER (FROM, TO); \
+ (FROM) = (FROM)->next; \
+ } while (0)
+
+/* Moves OP to appropriate freelist. OP is set to its successor. */
+
+#define MOVE_HEAD_TO_FREELIST(OP, TYPE) \
+ do \
+ { \
+ TYPE##_optype_p next = (OP)->next; \
+ (OP)->next \
+ = gimple_ssa_operands (cfun)->free_##TYPE##s; \
+ gimple_ssa_operands (cfun)->free_##TYPE##s = (OP);\
+ (OP) = next; \
+ } while (0)
+
+/* Initializes immediate use at USE_PTR to value VAL, and links it to the list
+ of immediate uses. STMT is the current statement. */
+
+#define INITIALIZE_USE(USE_PTR, VAL, STMT) \
+ do \
+ { \
+ (USE_PTR)->use = (VAL); \
+ link_imm_use_stmt ((USE_PTR), *(VAL), (STMT)); \
+ } while (0)
+
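/* For illustration, the list-building discipline behind APPEND_OP_AFTER
   and MOVE_HEAD_AFTER: a dummy head lives on the stack, a LAST pointer
   chases the tail, and the real head is taken from new_list.next when
   done.  A stand-alone sketch with illustrative types:  */

struct op { struct op *next; };

static struct op *
chain_nodes (struct op *nodes, int n)
{
  struct op new_list, *last = &new_list;   /* Dummy head on the stack.  */
  int i;

  for (i = 0; i < n; i++)
    {
      last->next = &nodes[i];              /* APPEND_OP_AFTER (ELT, TO).  */
      last = &nodes[i];
    }

  last->next = 0;                          /* Terminate the list.  */
  return new_list.next;                    /* Real head; dummy discarded.  */
}
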
+/* Adds OP to the list of defs after LAST, and moves
+ LAST to the new element. */
+
+static inline void
+add_def_op (tree *op, def_optype_p *last)
+{
+ def_optype_p new;
+
+ ALLOC_OPTYPE (new, def);
+ DEF_OP_PTR (new) = op;
+ APPEND_OP_AFTER (new, *last);
+}
-/* Free memory for V_MUST_DEFS. */
+/* Adds OP to the list of uses of statement STMT after LAST, and moves
+ LAST to the new element. */
static inline void
-free_v_must_defs (v_must_def_optype *v_must_defs)
+add_use_op (tree stmt, tree *op, use_optype_p *last)
{
- if (*v_must_defs)
- {
- ggc_free (*v_must_defs);
- *v_must_defs = NULL;
- }
+ use_optype_p new;
+
+ ALLOC_OPTYPE (new, use);
+ INITIALIZE_USE (USE_OP_PTR (new), op, stmt);
+ APPEND_OP_AFTER (new, *last);
}
+/* Adds OP to the list of vuses of statement STMT after LAST, and moves
+ LAST to the new element. */
-/* Initialize the operand cache routines. */
+static inline void
+add_vuse_op (tree stmt, tree op, vuse_optype_p *last)
+{
+ vuse_optype_p new;
-void
-init_ssa_operands (void)
+ ALLOC_OPTYPE (new, vuse);
+ VUSE_OP (new) = op;
+ INITIALIZE_USE (VUSE_OP_PTR (new), &VUSE_OP (new), stmt);
+ APPEND_OP_AFTER (new, *last);
+}
+
+/* Adds OP to the list of maydefs of statement STMT after LAST, and moves
+ LAST to the new element. */
+
+static inline void
+add_maydef_op (tree stmt, tree op, maydef_optype_p *last)
{
- VARRAY_TREE_PTR_INIT (build_defs, 5, "build defs");
- VARRAY_TREE_PTR_INIT (build_uses, 10, "build uses");
- VARRAY_TREE_INIT (build_v_may_defs, 10, "build v_may_defs");
- VARRAY_TREE_INIT (build_vuses, 10, "build vuses");
- VARRAY_TREE_INIT (build_v_must_defs, 10, "build v_must_defs");
+ maydef_optype_p new;
+
+ ALLOC_OPTYPE (new, maydef);
+ MAYDEF_RESULT (new) = op;
+ MAYDEF_OP (new) = op;
+ INITIALIZE_USE (MAYDEF_OP_PTR (new), &MAYDEF_OP (new), stmt);
+ APPEND_OP_AFTER (new, *last);
}
+/* Adds OP to the list of mustdefs of statement STMT after LAST, and moves
+ LAST to the new element. */
-/* Dispose of anything required by the operand routines. */
+static inline void
+add_mustdef_op (tree stmt, tree op, mustdef_optype_p *last)
+{
+ mustdef_optype_p new;
-void
-fini_ssa_operands (void)
+ ALLOC_OPTYPE (new, mustdef);
+ MUSTDEF_RESULT (new) = op;
+ MUSTDEF_KILL (new) = op;
+ INITIALIZE_USE (MUSTDEF_KILL_PTR (new), &MUSTDEF_KILL (new), stmt);
+ APPEND_OP_AFTER (new, *last);
+}
+
+/* Takes elements from build_defs and turns them into def operands of STMT.
+ TODO -- Given that def operands list is not necessarily sorted, merging
+ the operands this way does not make much sense.
+ -- Make build_defs VEC of tree *. */
+
+static inline void
+finalize_ssa_def_ops (tree stmt)
{
- ggc_free (build_defs);
- ggc_free (build_uses);
- ggc_free (build_v_may_defs);
- ggc_free (build_vuses);
- ggc_free (build_v_must_defs);
- build_defs = NULL;
- build_uses = NULL;
- build_v_may_defs = NULL;
- build_vuses = NULL;
- build_v_must_defs = NULL;
- if (clobbered_v_may_defs)
+ unsigned new_i;
+ struct def_optype_d new_list;
+ def_optype_p old_ops, last;
+ tree *old_base;
+
+ new_list.next = NULL;
+ last = &new_list;
+
+ old_ops = DEF_OPS (stmt);
+
+ new_i = 0;
+ while (old_ops && new_i < VEC_length (tree, build_defs))
{
- ggc_free (clobbered_v_may_defs);
- ggc_free (clobbered_vuses);
- clobbered_v_may_defs = NULL;
- clobbered_vuses = NULL;
+ tree *new_base = (tree *) VEC_index (tree, build_defs, new_i);
+ old_base = DEF_OP_PTR (old_ops);
+
+ if (old_base == new_base)
+ {
+ /* If variables are the same, reuse this node. */
+ MOVE_HEAD_AFTER (old_ops, last);
+ new_i++;
+ }
+ else if (old_base < new_base)
+ {
+ /* If old is less than new, old goes to the free list. */
+ MOVE_HEAD_TO_FREELIST (old_ops, def);
+ }
+ else
+ {
+ /* This is a new operand. */
+ add_def_op (new_base, &last);
+ new_i++;
+ }
}
- if (ro_call_vuses)
+
+ /* If there is anything remaining in the build_defs list, simply emit it. */
+ for ( ; new_i < VEC_length (tree, build_defs); new_i++)
+ add_def_op ((tree *) VEC_index (tree, build_defs, new_i), &last);
+
+ last->next = NULL;
+
+ /* If there is anything in the old list, free it. */
+ if (old_ops)
{
- ggc_free (ro_call_vuses);
- ro_call_vuses = NULL;
+ old_ops->next = gimple_ssa_operands (cfun)->free_defs;
+ gimple_ssa_operands (cfun)->free_defs = old_ops;
}
-}
+ /* Now set the stmt's operands. */
+ DEF_OPS (stmt) = new_list.next;
+
+#ifdef ENABLE_CHECKING
+ {
+ def_optype_p ptr;
+ unsigned x = 0;
+ for (ptr = DEF_OPS (stmt); ptr; ptr = ptr->next)
+ x++;
-/* All the finalize_ssa_* routines do the work required to turn the build_
- VARRAY into an operand_vector of the appropriate type. The original vector,
- if any, is passed in for comparison and virtual SSA_NAME reuse. If the
- old vector is reused, the pointer passed in is set to NULL so that
- the memory is not freed when the old operands are freed. */
+ gcc_assert (x == VEC_length (tree, build_defs));
+ }
+#endif
+}
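
/* For illustration, the loop above reduced to a stand-alone sketch: a
   merge of an old (ideally sorted, as the TODO notes build_defs is not
   guaranteed to be) linked list against a new sorted array, reusing
   nodes whose keys match, releasing stale nodes, and allocating fresh
   ones for new keys.  malloc/free stand in for the operand free lists;
   names are illustrative, not GCC's.  */

#include <stdlib.h>

struct dnode { struct dnode *next; unsigned key; };

static struct dnode *
dnode_new (unsigned key)
{
  struct dnode *n = malloc (sizeof (*n));
  n->key = key;
  n->next = NULL;
  return n;
}

static struct dnode *
merge_ops (struct dnode *old_ops, const unsigned *new_keys, unsigned n)
{
  struct dnode head, *last = &head;
  unsigned i = 0;

  while (old_ops && i < n)
    {
      if (old_ops->key == new_keys[i])
        {
          /* Same key: splice the old node into the result.  */
          last->next = old_ops;
          last = old_ops;
          old_ops = old_ops->next;
          i++;
        }
      else if (old_ops->key < new_keys[i])
        {
          /* Old key no longer present: release the node.  */
          struct dnode *dead = old_ops;
          old_ops = old_ops->next;
          free (dead);
        }
      else
        {
          /* Key is new: make a fresh node.  */
          last->next = dnode_new (new_keys[i++]);
          last = last->next;
        }
    }

  for (; i < n; i++)                 /* Emit whatever new keys remain.  */
    {
      last->next = dnode_new (new_keys[i]);
      last = last->next;
    }

  while (old_ops)                    /* Release whatever old nodes remain.  */
    {
      struct dnode *dead = old_ops;
      old_ops = old_ops->next;
      free (dead);
    }

  last->next = NULL;
  return head.next;
}
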
-/* Return a new def operand vector for STMT, comparing to OLD_OPS_P. */
+/* This routine will create stmt operands for STMT from the def build list. */
-static def_optype
-finalize_ssa_defs (def_optype *old_ops_p, tree stmt ATTRIBUTE_UNUSED)
+static void
+finalize_ssa_defs (tree stmt)
{
- unsigned num, x;
- def_optype def_ops, old_ops;
- bool build_diff;
-
- num = VARRAY_ACTIVE_SIZE (build_defs);
- if (num == 0)
- return NULL;
+ unsigned int num = VEC_length (tree, build_defs);
/* There should only be a single real definition per assignment. */
- gcc_assert (TREE_CODE (stmt) != MODIFY_EXPR || num <= 1);
+ gcc_assert ((stmt && TREE_CODE (stmt) != GIMPLE_MODIFY_STMT) || num <= 1);
- old_ops = *old_ops_p;
+ /* If there is an old list, often the new list is identical, or close, so
+ find the elements at the beginning that are the same as the vector. */
+ finalize_ssa_def_ops (stmt);
+ VEC_truncate (tree, build_defs, 0);
+}
- /* Compare old vector and new array. */
- build_diff = true;
- if (old_ops && old_ops->num_defs == num)
- {
- build_diff = false;
- for (x = 0; x < num; x++)
- if (old_ops->defs[x].def != VARRAY_TREE_PTR (build_defs, x))
- {
- build_diff = true;
- break;
- }
- }
+/* Takes elements from build_uses and turns them into use operands of STMT.
+ TODO -- Make build_uses VEC of tree *. */
- if (!build_diff)
- {
- def_ops = old_ops;
- *old_ops_p = NULL;
- }
- else
+static inline void
+finalize_ssa_use_ops (tree stmt)
+{
+ unsigned new_i;
+ struct use_optype_d new_list;
+ use_optype_p old_ops, ptr, last;
+
+ new_list.next = NULL;
+ last = &new_list;
+
+ old_ops = USE_OPS (stmt);
+
+ /* If there is anything in the old list, free it. */
+ if (old_ops)
{
- def_ops = allocate_def_optype (num);
- for (x = 0; x < num ; x++)
- def_ops->defs[x].def = VARRAY_TREE_PTR (build_defs, x);
+ for (ptr = old_ops; ptr; ptr = ptr->next)
+ delink_imm_use (USE_OP_PTR (ptr));
+ old_ops->next = gimple_ssa_operands (cfun)->free_uses;
+ gimple_ssa_operands (cfun)->free_uses = old_ops;
}
- VARRAY_POP_ALL (build_defs);
-
- return def_ops;
-}
+ /* Now create nodes for all the new nodes. */
+ for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
+ add_use_op (stmt, (tree *) VEC_index (tree, build_uses, new_i), &last);
+ last->next = NULL;
-/* Return a new use operand vector for STMT, comparing to OLD_OPS_P. */
+ /* Now set the stmt's operands. */
+ USE_OPS (stmt) = new_list.next;
-static use_optype
-finalize_ssa_uses (use_optype *old_ops_p, tree stmt ATTRIBUTE_UNUSED)
-{
- unsigned num, x;
- use_optype use_ops, old_ops;
- bool build_diff;
+#ifdef ENABLE_CHECKING
+ {
+ unsigned x = 0;
+ for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
+ x++;
- num = VARRAY_ACTIVE_SIZE (build_uses);
- if (num == 0)
- return NULL;
+ gcc_assert (x == VEC_length (tree, build_uses));
+ }
+#endif
+}
+/* This routine will create stmt operands for STMT from the use build list. */
+
+static void
+finalize_ssa_uses (tree stmt)
+{
#ifdef ENABLE_CHECKING
{
unsigned x;
+ unsigned num = VEC_length (tree, build_uses);
+
/* If the pointer to the operand is the statement itself, something is
wrong. It means that we are pointing to a local variable (the
- initial call to get_stmt_operands does not pass a pointer to a
+ initial call to update_stmt_operands does not pass a pointer to a
statement). */
for (x = 0; x < num; x++)
- gcc_assert (*(VARRAY_TREE_PTR (build_uses, x)) != stmt);
+ gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
}
#endif
- old_ops = *old_ops_p;
-
- /* Check if the old vector and the new array are the same. */
- build_diff = true;
- if (old_ops && old_ops->num_uses == num)
- {
- build_diff = false;
- for (x = 0; x < num; x++)
- if (old_ops->uses[x].use != VARRAY_TREE_PTR (build_uses, x))
- {
- build_diff = true;
- break;
- }
- }
-
- if (!build_diff)
- {
- use_ops = old_ops;
- *old_ops_p = NULL;
- }
- else
- {
- use_ops = allocate_use_optype (num);
- for (x = 0; x < num ; x++)
- use_ops->uses[x].use = VARRAY_TREE_PTR (build_uses, x);
- }
- VARRAY_POP_ALL (build_uses);
-
- return use_ops;
+ finalize_ssa_use_ops (stmt);
+ VEC_truncate (tree, build_uses, 0);
}
-/* Return a new v_may_def operand vector for STMT, comparing to OLD_OPS_P. */
+/* Takes elements from build_v_may_defs and turns them into maydef operands of
+ STMT. */
-static v_may_def_optype
-finalize_ssa_v_may_defs (v_may_def_optype *old_ops_p)
+static inline void
+finalize_ssa_v_may_def_ops (tree stmt)
{
- unsigned num, x, i, old_num;
- v_may_def_optype v_may_def_ops, old_ops;
- tree result, var;
- bool build_diff;
+ unsigned new_i;
+ struct maydef_optype_d new_list;
+ maydef_optype_p old_ops, ptr, last;
+ tree act;
+ unsigned old_base, new_base;
- num = VARRAY_ACTIVE_SIZE (build_v_may_defs);
- if (num == 0)
- return NULL;
+ new_list.next = NULL;
+ last = &new_list;
- old_ops = *old_ops_p;
+ old_ops = MAYDEF_OPS (stmt);
- /* Check if the old vector and the new array are the same. */
- build_diff = true;
- if (old_ops && old_ops->num_v_may_defs == num)
+ new_i = 0;
+ while (old_ops && new_i < VEC_length (tree, build_v_may_defs))
{
- old_num = num;
- build_diff = false;
- for (x = 0; x < num; x++)
+ act = VEC_index (tree, build_v_may_defs, new_i);
+ new_base = get_name_decl (act);
+ old_base = get_name_decl (MAYDEF_OP (old_ops));
+
+ if (old_base == new_base)
{
- var = old_ops->v_may_defs[x].def;
- if (TREE_CODE (var) == SSA_NAME)
- var = SSA_NAME_VAR (var);
- if (var != VARRAY_TREE (build_v_may_defs, x))
- {
- build_diff = true;
- break;
- }
+ /* If variables are the same, reuse this node. */
+ MOVE_HEAD_AFTER (old_ops, last);
+ set_virtual_use_link (MAYDEF_OP_PTR (last), stmt);
+ new_i++;
+ }
+ else if (old_base < new_base)
+ {
+ /* If old is less than new, old goes to the free list. */
+ delink_imm_use (MAYDEF_OP_PTR (old_ops));
+ MOVE_HEAD_TO_FREELIST (old_ops, maydef);
+ }
+ else
+ {
+ /* This is a new operand. */
+ add_maydef_op (stmt, act, &last);
+ new_i++;
}
}
- else
- old_num = (old_ops ? old_ops->num_v_may_defs : 0);
- if (!build_diff)
- {
- v_may_def_ops = old_ops;
- *old_ops_p = NULL;
- }
- else
+ /* If there is anything remaining in the build_v_may_defs list, simply emit it. */
+ for ( ; new_i < VEC_length (tree, build_v_may_defs); new_i++)
+ add_maydef_op (stmt, VEC_index (tree, build_v_may_defs, new_i), &last);
+
+ last->next = NULL;
+
+ /* If there is anything in the old list, free it. */
+ if (old_ops)
{
- v_may_def_ops = allocate_v_may_def_optype (num);
- for (x = 0; x < num; x++)
- {
- var = VARRAY_TREE (build_v_may_defs, x);
- /* Look for VAR in the old operands vector. */
- for (i = 0; i < old_num; i++)
- {
- result = old_ops->v_may_defs[i].def;
- if (TREE_CODE (result) == SSA_NAME)
- result = SSA_NAME_VAR (result);
- if (result == var)
- {
- v_may_def_ops->v_may_defs[x] = old_ops->v_may_defs[i];
- break;
- }
- }
- if (i == old_num)
- {
- v_may_def_ops->v_may_defs[x].def = var;
- v_may_def_ops->v_may_defs[x].use = var;
- }
- }
+ for (ptr = old_ops; ptr; ptr = ptr->next)
+ delink_imm_use (MAYDEF_OP_PTR (ptr));
+ old_ops->next = gimple_ssa_operands (cfun)->free_maydefs;
+ gimple_ssa_operands (cfun)->free_maydefs = old_ops;
}
- /* Empty the V_MAY_DEF build vector after VUSES have been processed. */
+ /* Now set the stmt's operands. */
+ MAYDEF_OPS (stmt) = new_list.next;
- return v_may_def_ops;
+#ifdef ENABLE_CHECKING
+ {
+ unsigned x = 0;
+ for (ptr = MAYDEF_OPS (stmt); ptr; ptr = ptr->next)
+ x++;
+
+ gcc_assert (x == VEC_length (tree, build_v_may_defs));
+ }
+#endif
}
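+
+/* This routine will create stmt operands for STMT from the V_MAY_DEF
+ build list. */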
+static void
+finalize_ssa_v_may_defs (tree stmt)
+{
+ finalize_ssa_v_may_def_ops (stmt);
+}
+
-/* Clear the in_list bits and empty the build array for v_may_defs. */
+/* Clear the in_list bits and empty the build array for V_MAY_DEFs. */
static inline void
cleanup_v_may_defs (void)
{
unsigned x, num;
- num = VARRAY_ACTIVE_SIZE (build_v_may_defs);
+ num = VEC_length (tree, build_v_may_defs);
for (x = 0; x < num; x++)
{
- tree t = VARRAY_TREE (build_v_may_defs, x);
- var_ann_t ann = var_ann (t);
- ann->in_v_may_def_list = 0;
+ tree t = VEC_index (tree, build_v_may_defs, x);
+ if (TREE_CODE (t) != SSA_NAME)
+ {
+ var_ann_t ann = var_ann (t);
+ ann->in_v_may_def_list = 0;
+ }
}
- VARRAY_POP_ALL (build_v_may_defs);
-}
+ VEC_truncate (tree, build_v_may_defs, 0);
+}
+
-/* Return a new vuse operand vector, comparing to OLD_OPS_P. */
+/* Takes elements from build_vuses and turns them into vuse operands of
+ STMT. */
-static vuse_optype
-finalize_ssa_vuses (vuse_optype *old_ops_p)
+static inline void
+finalize_ssa_vuse_ops (tree stmt)
{
- unsigned num, x, i, num_v_may_defs, old_num;
- vuse_optype vuse_ops, old_ops;
- bool build_diff;
+ unsigned new_i;
+ struct vuse_optype_d new_list;
+ vuse_optype_p old_ops, ptr, last;
+ tree act;
+ unsigned old_base, new_base;
+
+ new_list.next = NULL;
+ last = &new_list;
+
+ old_ops = VUSE_OPS (stmt);
+
+ new_i = 0;
+ while (old_ops && new_i < VEC_length (tree, build_vuses))
+ {
+ act = VEC_index (tree, build_vuses, new_i);
+ new_base = get_name_decl (act);
+ old_base = get_name_decl (VUSE_OP (old_ops));
+
+ if (old_base == new_base)
+ {
+ /* If variables are the same, reuse this node. */
+ MOVE_HEAD_AFTER (old_ops, last);
+ set_virtual_use_link (VUSE_OP_PTR (last), stmt);
+ new_i++;
+ }
+ else if (old_base < new_base)
+ {
+ /* If old is less than new, old goes to the free list. */
+ delink_imm_use (USE_OP_PTR (old_ops));
+ MOVE_HEAD_TO_FREELIST (old_ops, vuse);
+ }
+ else
+ {
+ /* This is a new operand. */
+ add_vuse_op (stmt, act, &last);
+ new_i++;
+ }
+ }
+
+ /* If there is anything remaining in the build_vuses list, simply emit it. */
+ for ( ; new_i < VEC_length (tree, build_vuses); new_i++)
+ add_vuse_op (stmt, VEC_index (tree, build_vuses, new_i), &last);
+
+ last->next = NULL;
- num = VARRAY_ACTIVE_SIZE (build_vuses);
- if (num == 0)
+ /* If there is anything in the old list, free it. */
+ if (old_ops)
{
- cleanup_v_may_defs ();
- return NULL;
+ for (ptr = old_ops; ptr; ptr = ptr->next)
+ delink_imm_use (VUSE_OP_PTR (ptr));
+ old_ops->next = gimple_ssa_operands (cfun)->free_vuses;
+ gimple_ssa_operands (cfun)->free_vuses = old_ops;
}
+ /* Now set the stmt's operands. */
+ VUSE_OPS (stmt) = new_list.next;
+
+#ifdef ENABLE_CHECKING
+ {
+ unsigned x = 0;
+ for (ptr = VUSE_OPS (stmt); ptr; ptr = ptr->next)
+ x++;
+
+ gcc_assert (x == VEC_length (tree, build_vuses));
+ }
+#endif
+}
+
+/* This routine will create VUSE operands for STMT from the build list,
+ after pruning VUSEs made superfluous by a V_MAY_DEF of the same
+ variable. */
+
+static void
+finalize_ssa_vuses (tree stmt)
+{
+ unsigned num, num_v_may_defs;
+ unsigned vuse_index;
+
/* Remove superfluous VUSE operands. If the statement already has a
- V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
- needed because V_MAY_DEFs imply a VUSE of the variable. For instance,
- suppose that variable 'a' is aliased:
+ V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is
+ not needed because V_MAY_DEFs imply a VUSE of the variable. For
+ instance, suppose that variable 'a' is aliased:
# VUSE <a_2>
# a_3 = V_MAY_DEF <a_2>
a = a + 1;
- The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
- operation. */
+ The VUSE <a_2> is superfluous because it is implied by the
+ V_MAY_DEF operation. */
+ num = VEC_length (tree, build_vuses);
+ num_v_may_defs = VEC_length (tree, build_v_may_defs);
- num_v_may_defs = VARRAY_ACTIVE_SIZE (build_v_may_defs);
-
- if (num_v_may_defs > 0)
+ if (num > 0 && num_v_may_defs > 0)
{
- size_t i;
- tree vuse;
- for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
- {
- vuse = VARRAY_TREE (build_vuses, i);
+ for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
+ {
+ tree vuse;
+ vuse = VEC_index (tree, build_vuses, vuse_index);
if (TREE_CODE (vuse) != SSA_NAME)
{
var_ann_t ann = var_ann (vuse);
ann->in_vuse_list = 0;
if (ann->in_v_may_def_list)
{
- /* If we found a useless VUSE operand, remove it from the
- operand array by replacing it with the last active element
- in the operand array (unless the useless VUSE was the
- last operand, in which case we simply remove it. */
- if (i != VARRAY_ACTIVE_SIZE (build_vuses) - 1)
- {
- VARRAY_TREE (build_vuses, i)
- = VARRAY_TREE (build_vuses,
- VARRAY_ACTIVE_SIZE (build_vuses) - 1);
- }
- VARRAY_POP (build_vuses);
-
- /* We want to rescan the element at this index, unless
- this was the last element, in which case the loop
- terminates. */
- i--;
+ VEC_ordered_remove (tree, build_vuses, vuse_index);
+ continue;
}
}
+ vuse_index++;
}
}
else
- /* Clear out the in_list bits. */
- for (x = 0; x < num; x++)
- {
- tree t = VARRAY_TREE (build_vuses, x);
- if (TREE_CODE (t) != SSA_NAME)
- {
- var_ann_t ann = var_ann (t);
- ann->in_vuse_list = 0;
- }
- }
-
-
- num = VARRAY_ACTIVE_SIZE (build_vuses);
- /* We could have reduced the size to zero now, however. */
- if (num == 0)
- {
- cleanup_v_may_defs ();
- return NULL;
- }
-
- old_ops = *old_ops_p;
-
- /* Determine whether vuses is the same as the old vector. */
- build_diff = true;
- if (old_ops && old_ops->num_vuses == num)
{
- old_num = num;
- build_diff = false;
- for (x = 0; x < num ; x++)
- {
- tree v;
- v = old_ops->vuses[x];
- if (TREE_CODE (v) == SSA_NAME)
- v = SSA_NAME_VAR (v);
- if (v != VARRAY_TREE (build_vuses, x))
+ /* Clear out the in_list bits. */
+ for (vuse_index = 0;
+ vuse_index < VEC_length (tree, build_vuses);
+ vuse_index++)
+ {
+ tree t = VEC_index (tree, build_vuses, vuse_index);
+ if (TREE_CODE (t) != SSA_NAME)
{
- build_diff = true;
- break;
+ var_ann_t ann = var_ann (t);
+ ann->in_vuse_list = 0;
}
}
}
- else
- old_num = (old_ops ? old_ops->num_vuses : 0);
- if (!build_diff)
- {
- vuse_ops = old_ops;
- *old_ops_p = NULL;
- }
- else
- {
- vuse_ops = allocate_vuse_optype (num);
- for (x = 0; x < num; x++)
- {
- tree result, var = VARRAY_TREE (build_vuses, x);
- /* Look for VAR in the old vector, and use that SSA_NAME. */
- for (i = 0; i < old_num; i++)
- {
- result = old_ops->vuses[i];
- if (TREE_CODE (result) == SSA_NAME)
- result = SSA_NAME_VAR (result);
- if (result == var)
- {
- vuse_ops->vuses[x] = old_ops->vuses[i];
- break;
- }
- }
- if (i == old_num)
- vuse_ops->vuses[x] = var;
- }
- }
+ finalize_ssa_vuse_ops (stmt);
- /* The v_may_def build vector wasn't freed because we needed it here.
- Free it now with the vuses build vector. */
- VARRAY_POP_ALL (build_vuses);
+ /* The V_MAY_DEF build vector wasn't cleaned up because we needed it. */
cleanup_v_may_defs ();
+
+ /* Free the VUSEs build vector. */
+ VEC_truncate (tree, build_vuses, 0);
- return vuse_ops;
}
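
/* For illustration, the shape of the pruning loop above as a
   stand-alone sketch: on an ordered removal the index is not advanced,
   so the element shifted into the vacated slot is examined on the next
   iteration.  Illustrative code, not GCC's.  */

static unsigned
remove_evens (unsigned *v, unsigned len)
{
  unsigned i = 0;
  while (i < len)
    {
      if (v[i] % 2 == 0)
        {
          unsigned j;
          for (j = i; j + 1 < len; j++)   /* Ordered remove: shift left.  */
            v[j] = v[j + 1];
          len--;
          continue;                       /* Re-examine slot i.  */
        }
      i++;
    }
  return len;
}
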
-/* Return a new v_must_def operand vector for STMT, comparing to OLD_OPS_P. */
+/* Takes elements from build_v_must_defs and turns them into mustdef operands of
+ STMT. */
-static v_must_def_optype
-finalize_ssa_v_must_defs (v_must_def_optype *old_ops_p,
- tree stmt ATTRIBUTE_UNUSED)
+static inline void
+finalize_ssa_v_must_def_ops (tree stmt)
{
- unsigned num, x, i, old_num = 0;
- v_must_def_optype v_must_def_ops, old_ops;
- bool build_diff;
+ unsigned new_i;
+ struct mustdef_optype_d new_list;
+ mustdef_optype_p old_ops, ptr, last;
+ tree act;
+ unsigned old_base, new_base;
- num = VARRAY_ACTIVE_SIZE (build_v_must_defs);
- if (num == 0)
- return NULL;
+ new_list.next = NULL;
+ last = &new_list;
- /* There should only be a single V_MUST_DEF per assignment. */
- gcc_assert (TREE_CODE (stmt) != MODIFY_EXPR || num <= 1);
+ old_ops = MUSTDEF_OPS (stmt);
- old_ops = *old_ops_p;
-
- /* Check if the old vector and the new array are the same. */
- build_diff = true;
- if (old_ops && old_ops->num_v_must_defs == num)
+ new_i = 0;
+ while (old_ops && new_i < VEC_length (tree, build_v_must_defs))
{
- old_num = num;
- build_diff = false;
- for (x = 0; x < num; x++)
+ act = VEC_index (tree, build_v_must_defs, new_i);
+ new_base = get_name_decl (act);
+ old_base = get_name_decl (MUSTDEF_KILL (old_ops));
+
+ if (old_base == new_base)
{
- tree var = old_ops->v_must_defs[x].def;
- if (TREE_CODE (var) == SSA_NAME)
- var = SSA_NAME_VAR (var);
- if (var != VARRAY_TREE (build_v_must_defs, x))
- {
- build_diff = true;
- break;
- }
+ /* If variables are the same, reuse this node. */
+ MOVE_HEAD_AFTER (old_ops, last);
+ set_virtual_use_link (MUSTDEF_KILL_PTR (last), stmt);
+ new_i++;
}
- }
- else
- old_num = (old_ops ? old_ops->num_v_must_defs : 0);
-
- if (!build_diff)
- {
- v_must_def_ops = old_ops;
- *old_ops_p = NULL;
- }
- else
- {
- v_must_def_ops = allocate_v_must_def_optype (num);
- for (x = 0; x < num ; x++)
+ else if (old_base < new_base)
{
- tree result, var = VARRAY_TREE (build_v_must_defs, x);
- /* Look for VAR in the original vector. */
- for (i = 0; i < old_num; i++)
- {
- result = old_ops->v_must_defs[i].def;
- if (TREE_CODE (result) == SSA_NAME)
- result = SSA_NAME_VAR (result);
- if (result == var)
- {
- v_must_def_ops->v_must_defs[x].def = old_ops->v_must_defs[i].def;
- v_must_def_ops->v_must_defs[x].use = old_ops->v_must_defs[i].use;
- break;
- }
- }
- if (i == old_num)
- {
- v_must_def_ops->v_must_defs[x].def = var;
- v_must_def_ops->v_must_defs[x].use = var;
- }
+ /* If old is less than new, old goes to the free list. */
+ delink_imm_use (MUSTDEF_KILL_PTR (old_ops));
+ MOVE_HEAD_TO_FREELIST (old_ops, mustdef);
+ }
+ else
+ {
+ /* This is a new operand. */
+ add_mustdef_op (stmt, act, &last);
+ new_i++;
}
}
- VARRAY_POP_ALL (build_v_must_defs);
- return v_must_def_ops;
-}
+ /* If there is anything remaining in the build_v_must_defs list, simply emit it. */
+ for ( ; new_i < VEC_length (tree, build_v_must_defs); new_i++)
+ add_mustdef_op (stmt, VEC_index (tree, build_v_must_defs, new_i), &last);
+ last->next = NULL;
-/* Finalize all the build vectors, fill the new ones into INFO. */
+ /* If there is anything in the old list, free it. */
+ if (old_ops)
+ {
+ for (ptr = old_ops; ptr; ptr = ptr->next)
+ delink_imm_use (MUSTDEF_KILL_PTR (ptr));
+ old_ops->next = gimple_ssa_operands (cfun)->free_mustdefs;
+ gimple_ssa_operands (cfun)->free_mustdefs = old_ops;
+ }
+ /* Now set the stmt's operands. */
+ MUSTDEF_OPS (stmt) = new_list.next;
+
+#ifdef ENABLE_CHECKING
+ {
+ unsigned x = 0;
+ for (ptr = MUSTDEF_OPS (stmt); ptr; ptr = ptr->next)
+ x++;
+
+ gcc_assert (x == VEC_length (tree, build_v_must_defs));
+ }
+#endif
+}
+
+static void
+finalize_ssa_v_must_defs (tree stmt)
+{
+ /* In the presence of subvars, there may be more than one V_MUST_DEF
+ per statement (one for each subvar). It is a bit expensive to
+ verify that all must-defs in a statement belong to subvars if
+ there is more than one must-def, so we don't do it. Suffice to
+ say, if you reach here without having subvars, and have num > 1,
+ you have hit a bug. */
+ finalize_ssa_v_must_def_ops (stmt);
+ VEC_truncate (tree, build_v_must_defs, 0);
+}
+
+
+/* Finalize all the build vectors, filling the results into STMT's
+ operand lists. */
+
static inline void
-finalize_ssa_stmt_operands (tree stmt, stmt_operands_p old_ops,
- stmt_operands_p new_ops)
+finalize_ssa_stmt_operands (tree stmt)
{
- new_ops->def_ops = finalize_ssa_defs (&(old_ops->def_ops), stmt);
- new_ops->use_ops = finalize_ssa_uses (&(old_ops->use_ops), stmt);
- new_ops->v_must_def_ops
- = finalize_ssa_v_must_defs (&(old_ops->v_must_def_ops), stmt);
- new_ops->v_may_def_ops = finalize_ssa_v_may_defs (&(old_ops->v_may_def_ops));
- new_ops->vuse_ops = finalize_ssa_vuses (&(old_ops->vuse_ops));
+ finalize_ssa_defs (stmt);
+ finalize_ssa_uses (stmt);
+ finalize_ssa_v_must_defs (stmt);
+ finalize_ssa_v_may_defs (stmt);
+ finalize_ssa_vuses (stmt);
}
static inline void
start_ssa_stmt_operands (void)
{
- gcc_assert (VARRAY_ACTIVE_SIZE (build_defs) == 0);
- gcc_assert (VARRAY_ACTIVE_SIZE (build_uses) == 0);
- gcc_assert (VARRAY_ACTIVE_SIZE (build_vuses) == 0);
- gcc_assert (VARRAY_ACTIVE_SIZE (build_v_may_defs) == 0);
- gcc_assert (VARRAY_ACTIVE_SIZE (build_v_must_defs) == 0);
+ gcc_assert (VEC_length (tree, build_defs) == 0);
+ gcc_assert (VEC_length (tree, build_uses) == 0);
+ gcc_assert (VEC_length (tree, build_vuses) == 0);
+ gcc_assert (VEC_length (tree, build_v_may_defs) == 0);
+ gcc_assert (VEC_length (tree, build_v_must_defs) == 0);
}
static inline void
append_def (tree *def_p)
{
- VARRAY_PUSH_TREE_PTR (build_defs, def_p);
+ VEC_safe_push (tree, heap, build_defs, (tree)def_p);
}
static inline void
append_use (tree *use_p)
{
- VARRAY_PUSH_TREE_PTR (build_uses, use_p);
+ VEC_safe_push (tree, heap, build_uses, (tree)use_p);
}
static inline void
append_v_may_def (tree var)
{
- var_ann_t ann = get_var_ann (var);
+ if (TREE_CODE (var) != SSA_NAME)
+ {
+ var_ann_t ann = get_var_ann (var);
- /* Don't allow duplicate entries. */
- if (ann->in_v_may_def_list)
- return;
- ann->in_v_may_def_list = 1;
+ /* Don't allow duplicate entries. */
+ if (ann->in_v_may_def_list)
+ return;
+ ann->in_v_may_def_list = 1;
+ }
- VARRAY_PUSH_TREE (build_v_may_defs, var);
+ VEC_safe_push (tree, heap, build_v_may_defs, (tree)var);
}
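
/* For illustration, the O(1) duplicate rejection used above: a flag bit
   stored on the variable itself records build-list membership and is
   cleared when the list is emptied (see cleanup_v_may_defs).  Contrast
   with the linear scan used for V_MUST_DEFs just below.  A stand-alone
   sketch with illustrative types, assuming at most MAX_OPS appends:  */

struct var { unsigned in_list : 1; };

#define MAX_OPS 32
static struct var *build[MAX_OPS];
static unsigned n_build;

static void
append_once (struct var *v)
{
  if (v->in_list)
    return;                   /* Already queued: O(1) duplicate check.  */
  if (n_build == MAX_OPS)
    return;                   /* Sketch-only bound; GCC's vectors grow.  */
  v->in_list = 1;
  build[n_build++] = v;
}

static void
cleanup_build (void)
{
  unsigned i;
  for (i = 0; i < n_build; i++)
    build[i]->in_list = 0;    /* Reset the flags for the next statement.  */
  n_build = 0;
}
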
static inline void
append_vuse (tree var)
{
-
/* Don't allow duplicate entries. */
if (TREE_CODE (var) != SSA_NAME)
{
var_ann_t ann = get_var_ann (var);
if (ann->in_vuse_list || ann->in_v_may_def_list)
return;
ann->in_vuse_list = 1;
}
- VARRAY_PUSH_TREE (build_vuses, var);
+ VEC_safe_push (tree, heap, build_vuses, (tree)var);
}

/* Add VAR to the list of V_MUST_DEF operands. */

static inline void
append_v_must_def (tree var)
{
unsigned i;
/* Don't allow duplicate entries. */
- for (i = 0; i < VARRAY_ACTIVE_SIZE (build_v_must_defs); i++)
- if (var == VARRAY_TREE (build_v_must_defs, i))
+ for (i = 0; i < VEC_length (tree, build_v_must_defs); i++)
+ if (var == VEC_index (tree, build_v_must_defs, i))
return;
- VARRAY_PUSH_TREE (build_v_must_defs, var);
+ VEC_safe_push (tree, heap, build_v_must_defs, (tree)var);
}
-/* Create an operands cache for STMT, returning it in NEW_OPS. OLD_OPS are the
- original operands, and if ANN is non-null, appropriate stmt flags are set
- in the stmt's annotation. Note that some fields in old_ops may
- change to NULL, although none of the memory they originally pointed to
- will be destroyed. It is appropriate to call free_stmt_operands() on
- the value returned in old_ops.
- The rationale for this: Certain optimizations wish to examine the difference
- between new_ops and old_ops after processing. If a set of operands don't
- change, new_ops will simply assume the pointer in old_ops, and the old_ops
- pointer will be set to NULL, indicating no memory needs to be cleared.
- Usage might appear something like:
+/* REF is a tree that contains the entire pointer dereference
+ expression, if available, or NULL otherwise. ALIAS is the variable
+ we are asking if REF can access. OFFSET and SIZE come from the
+ memory access expression that generated this virtual operand. */
+
+static bool
+access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
+ HOST_WIDE_INT size)
+{
+ bool offsetgtz = offset > 0;
+ unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
+ tree base = ref ? get_base_address (ref) : NULL;
+
+ /* If ALIAS is .GLOBAL_VAR then the memory reference REF must be
+ using a call-clobbered memory tag. By definition, call-clobbered
+ memory tags can always touch .GLOBAL_VAR. */
+ if (alias == gimple_global_var (cfun))
+ return true;
+
+ /* If ALIAS is an SFT, it can't be touched if the offset
+ and size of the access is not overlapping with the SFT offset and
+ size. This is only true if we are accessing through a pointer
+ to a type that is the same as SFT_PARENT_VAR. Otherwise, we may
+ be accessing through a pointer to some substruct of the
+ structure, and if we try to prune there, we will have the wrong
+ offset, and get the wrong answer.
+ i.e., we can't prune without more work if we have something like
+
+ struct gcc_target
+ {
+ struct asm_out
+ {
+ const char *byte_op;
+ struct asm_int_op
+ {
+ const char *hi;
+ } aligned_op;
+ } asm_out;
+ } targetm;
+
+ foo = &targetm.asm_out.aligned_op;
+ return foo->hi;
+
+ SFT.1, which represents hi, will have SFT_OFFSET=32 because in
+ terms of SFT_PARENT_VAR, that is where it is.
+ However, the access through the foo pointer will be at offset 0. */
+ if (size != -1
+ && TREE_CODE (alias) == STRUCT_FIELD_TAG
+ && base
+ && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias))
+ && !overlap_subvar (offset, size, alias, NULL))
+ {
+#ifdef ACCESS_DEBUGGING
+ fprintf (stderr, "Access to ");
+ print_generic_expr (stderr, ref, 0);
+ fprintf (stderr, " may not touch ");
+ print_generic_expr (stderr, alias, 0);
+ fprintf (stderr, " in function %s\n", get_name (current_function_decl));
+#endif
+ return false;
+ }
- old_ops_copy = old_ops = stmt_ann(stmt)->operands;
- build_ssa_operands (stmt, NULL, &old_ops, &new_ops);
- <* compare old_ops_copy and new_ops *>
- free_ssa_operands (old_ops); */
+ /* With strict aliasing, it is impossible for a component access
+ through a pointer to touch a random variable, unless that
+ variable *is* a structure or a pointer.
-static void
-build_ssa_operands (tree stmt, stmt_ann_t ann, stmt_operands_p old_ops,
- stmt_operands_p new_ops)
-{
- enum tree_code code;
- tree_ann_t saved_ann = stmt->common.ann;
-
- /* Replace stmt's annotation with the one passed in for the duration
- of the operand building process. This allows "fake" stmts to be built
- and not be included in other data structures which can be built here. */
- stmt->common.ann = (tree_ann_t) ann;
-
- /* Initially assume that the statement has no volatile operands, nor
- makes aliased loads or stores. */
- if (ann)
+ That is, given p->c, and some random global variable b,
+ there is no legal way that p->c could be an access to b.
+
+ Without strict aliasing on, we consider it legal to do something
+ like:
+
+ struct foos { int l; };
+ int foo;
+ static struct foos *getfoo(void);
+ int main (void)
+ {
+ struct foos *f = getfoo();
+ f->l = 1;
+ foo = 2;
+ if (f->l == 1)
+ abort();
+ exit(0);
+ }
+ static struct foos *getfoo(void)
+ { return (struct foos *)&foo; }
+
+ (taken from 20000623-1.c)
+
+ The docs also say/imply that access through union pointers
+ is legal (but *not* if you take the address of the union member,
+ i.e. the inverse), such that you can do
+
+ typedef union {
+ int d;
+ } U;
+
+ int rv;
+ void breakme()
+ {
+ U *rv0;
+ U *pretmp = (U*)&rv;
+ rv0 = pretmp;
+ rv0->d = 42;
+ }
+ To implement this, we just punt on accesses through union
+ pointers entirely.
+ */
+ else if (ref
+ && flag_strict_aliasing
+ && TREE_CODE (ref) != INDIRECT_REF
+ && !MTAG_P (alias)
+ && (TREE_CODE (base) != INDIRECT_REF
+ || TREE_CODE (TREE_TYPE (base)) != UNION_TYPE)
+ && !AGGREGATE_TYPE_P (TREE_TYPE (alias))
+ && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE
+ && !var_ann (alias)->is_heapvar
+ /* When the struct has may_alias attached to it, its alias set is
+ zero and we must not prune the access, hence the check that the
+ alias set is nonzero. */
+ && get_alias_set (base))
{
- ann->has_volatile_ops = false;
- ann->makes_aliased_stores = false;
- ann->makes_aliased_loads = false;
+#ifdef ACCESS_DEBUGGING
+ fprintf (stderr, "Access to ");
+ print_generic_expr (stderr, ref, 0);
+ fprintf (stderr, " may not touch ");
+ print_generic_expr (stderr, alias, 0);
+ fprintf (stderr, " in function %s\n", get_name (current_function_decl));
+#endif
+ return false;
}
- start_ssa_stmt_operands ();
-
- code = TREE_CODE (stmt);
- switch (code)
+ /* If the offset of the access is greater than the size of one of
+ the possible aliases, it can't be touching that alias, because it
+ would be past the end of the structure. */
+ else if (ref
+ && flag_strict_aliasing
+ && TREE_CODE (ref) != INDIRECT_REF
+ && !MTAG_P (alias)
+ && !POINTER_TYPE_P (TREE_TYPE (alias))
+ && offsetgtz
+ && DECL_SIZE (alias)
+ && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
+ && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias)))
{
- case MODIFY_EXPR:
- /* First get operands from the RHS. For the LHS, we use a V_MAY_DEF if
- either only part of LHS is modified or if the RHS might throw,
- otherwise, use V_MUST_DEF.
+#ifdef ACCESS_DEBUGGING
+ fprintf (stderr, "Access to ");
+ print_generic_expr (stderr, ref, 0);
+ fprintf (stderr, " may not touch ");
+ print_generic_expr (stderr, alias, 0);
+ fprintf (stderr, " in function %s\n", get_name (current_function_decl));
+#endif
+ return false;
+ }
- ??? If it might throw, we should represent somehow that it is killed
- on the fallthrough path. */
- {
- tree lhs = TREE_OPERAND (stmt, 0);
- int lhs_flags = opf_is_def;
+ return true;
+}
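
/* For illustration, the last pruning rule above as a stand-alone check:
   an access whose start offset lies past the end of a variable cannot
   touch it.  long long stands in for HOST_WIDE_INT, and sizes are in
   bits as with DECL_SIZE; illustrative code, not GCC's.  */

#include <stdbool.h>

static bool
offset_past_end (long long offset, unsigned long long var_size_in_bits)
{
  /* Only positive offsets are pruned; the comparison is done unsigned,
     which is why the real code tests offsetgtz first.  */
  if (offset <= 0)
    return false;
  return (unsigned long long) offset > var_size_in_bits;
}
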
- get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);
- /* If the LHS is a VIEW_CONVERT_EXPR, it isn't changing whether
- or not the entire LHS is modified; that depends on what's
- inside the VIEW_CONVERT_EXPR. */
- if (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
- lhs = TREE_OPERAND (lhs, 0);
+/* Add VAR to the virtual operands array. FLAGS is as in
+ get_expr_operands. FULL_REF is a tree that contains the entire
+ pointer dereference expression, if available, or NULL otherwise.
+ OFFSET and SIZE come from the memory access expression that
+ generated this virtual operand. FOR_CLOBBER is true if this is
+ adding a virtual operand for a call clobber. */
- if (TREE_CODE (lhs) != ARRAY_REF && TREE_CODE (lhs) != ARRAY_RANGE_REF
- && TREE_CODE (lhs) != COMPONENT_REF
- && TREE_CODE (lhs) != BIT_FIELD_REF
- && TREE_CODE (lhs) != REALPART_EXPR
- && TREE_CODE (lhs) != IMAGPART_EXPR)
- lhs_flags |= opf_kill_def;
+static void
+add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
+ tree full_ref, HOST_WIDE_INT offset,
+ HOST_WIDE_INT size, bool for_clobber)
+{
+ VEC(tree,gc) *aliases;
+ tree sym;
+ var_ann_t v_ann;
+
+ sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
+ v_ann = var_ann (sym);
+
+ /* Mark statements with volatile operands. Optimizers should back
+ off from statements having volatile operands. */
+ if (TREE_THIS_VOLATILE (sym) && s_ann)
+ s_ann->has_volatile_ops = true;
- get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), lhs_flags);
- }
- break;
+ /* If the variable cannot be modified and this is a V_MAY_DEF change
+ it into a VUSE. This happens when read-only variables are marked
+ call-clobbered and/or aliased to writable variables. So we only
+ make the change for non-specific stores.
+
+ Note that if this is a specific store, i.e. associated with a
+ GIMPLE_MODIFY_STMT, then we can't suppress the V_MAY_DEF, lest we run
+ into validation problems.
+
+ This can happen when programs cast away const, leaving us with a
+ store to read-only memory. If the statement is actually executed
+ at runtime, then the program is ill formed. If the statement is
+ not executed then all is well. At the very least, we cannot ICE. */
+ if ((flags & opf_non_specific) && unmodifiable_var_p (var))
+ flags &= ~(opf_is_def | opf_kill_def);
+
+ /* The variable is not a GIMPLE register. Add it (or its aliases) to
+ virtual operands, unless the caller has specifically requested
+ not to add virtual operands (used when adding operands inside an
+ ADDR_EXPR expression). */
+ if (flags & opf_no_vops)
+ return;
+
+ aliases = v_ann->may_aliases;
+ if (aliases == NULL)
+ {
+ /* The variable is not aliased or it is an alias tag. */
+ if (flags & opf_is_def)
+ {
+ if (flags & opf_kill_def)
+ {
+ /* V_MUST_DEF for non-aliased, non-GIMPLE register
+ variable definitions. */
+ gcc_assert (!MTAG_P (var)
+ || TREE_CODE (var) == STRUCT_FIELD_TAG);
+ append_v_must_def (var);
+ }
+ else
+ {
+ /* Add a V_MAY_DEF for call-clobbered variables and
+ memory tags. */
+ append_v_may_def (var);
+ }
+ }
+ else
+ append_vuse (var);
+ }
+ else
+ {
+ unsigned i;
+ tree al;
+
+ /* The variable is aliased. Add its aliases to the virtual
+ operands. */
+ gcc_assert (VEC_length (tree, aliases) != 0);
+
+ if (flags & opf_is_def)
+ {
+ bool none_added = true;
- case COND_EXPR:
- get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
- break;
+ for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
+ {
+ if (!access_can_touch_variable (full_ref, al, offset, size))
+ continue;
+
+ none_added = false;
+ append_v_may_def (al);
+ }
- case SWITCH_EXPR:
- get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
- break;
+ /* If the variable is also an alias tag, add a virtual
+ operand for it, otherwise we will miss representing
+ references to the members of the variable's alias set.
+ This fixes the bug in gcc.c-torture/execute/20020503-1.c.
+
+ It is also necessary to add bare defs on clobbers for
+ SMT's, so that bare SMT uses caused by pruning all the
+ aliases will link up properly with calls. In order to
+ keep the number of these bare defs we add down to the
+ minimum necessary, we keep track of which SMT's were used
+ alone in statement vdefs or VUSEs. */
+ if (v_ann->is_aliased
+ || none_added
+ || (TREE_CODE (var) == SYMBOL_MEMORY_TAG
+ && for_clobber))
+ {
+ append_v_may_def (var);
+ }
+ }
+ else
+ {
+ bool none_added = true;
+ for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
+ {
+ if (!access_can_touch_variable (full_ref, al, offset, size))
+ continue;
+ none_added = false;
+ append_vuse (al);
+ }
- case ASM_EXPR:
- get_asm_expr_operands (stmt);
- break;
+ /* Similarly, append a virtual use for VAR itself, when
+ it is an alias tag. */
+ if (v_ann->is_aliased || none_added)
+ append_vuse (var);
+ }
+ }
+}
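
/* For illustration, the shape of the alias expansion above as a
   stand-alone sketch: an unaliased variable becomes the operand
   directly; an aliased one is expanded into the aliases the access can
   touch, with the variable itself appended as a fallback when pruning
   removed every alias.  (The real code also appends the variable when
   it is itself an alias tag.)  can_touch and append_operand are
   hypothetical stand-ins for access_can_touch_variable and the
   append_* routines.  */

#include <stdbool.h>

struct vop { struct vop **aliases; unsigned n_aliases; };

static bool can_touch (struct vop *alias) { (void) alias; return true; }
static void append_operand (struct vop *v) { (void) v; }

static void
expand_aliases (struct vop *v)
{
  unsigned i;
  bool none_added = true;

  if (v->n_aliases == 0)
    {
      /* Not aliased: the variable itself is the operand.  */
      append_operand (v);
      return;
    }

  for (i = 0; i < v->n_aliases; i++)
    if (can_touch (v->aliases[i]))
      {
        none_added = false;
        append_operand (v->aliases[i]);
      }

  /* If pruning removed every alias, fall back to the variable so the
     access still links up with other references.  */
  if (none_added)
    append_operand (v);
}
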
- case RETURN_EXPR:
- get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
- break;
- case GOTO_EXPR:
- get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
- break;
+/* Add *VAR_P to the appropriate operand array for S_ANN. FLAGS is as in
+ get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
+ the statement's real operands, otherwise it is added to virtual
+ operands. */
- case LABEL_EXPR:
- get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
- break;
+static void
+add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
+{
+ bool is_real_op;
+ tree var, sym;
+ var_ann_t v_ann;
- /* These nodes contain no variable references. */
- case BIND_EXPR:
- case CASE_LABEL_EXPR:
- case TRY_CATCH_EXPR:
- case TRY_FINALLY_EXPR:
- case EH_FILTER_EXPR:
- case CATCH_EXPR:
- case RESX_EXPR:
- break;
+ var = *var_p;
+ gcc_assert (SSA_VAR_P (var));
- default:
- /* Notice that if get_expr_operands tries to use &STMT as the operand
- pointer (which may only happen for USE operands), we will abort in
- append_use. This default will handle statements like empty
- statements, or CALL_EXPRs that may appear on the RHS of a statement
- or as statements themselves. */
- get_expr_operands (stmt, &stmt, opf_none);
- break;
- }
+ is_real_op = is_gimple_reg (var);
- finalize_ssa_stmt_operands (stmt, old_ops, new_ops);
- stmt->common.ann = saved_ann;
-}
+ /* If this is a real operand, the operand is either an SSA name or a
+ decl. Virtual operands may only be decls. */
+ gcc_assert (is_real_op || DECL_P (var));
+ sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
+ v_ann = var_ann (sym);
-/* Free any operands vectors in OPS. */
+ /* Mark statements with volatile operands. Optimizers should back
+ off from statements having volatile operands. */
+ if (TREE_THIS_VOLATILE (sym) && s_ann)
+ s_ann->has_volatile_ops = true;
-static void
-free_ssa_operands (stmt_operands_p ops)
-{
- if (ops->def_ops)
- free_defs (&(ops->def_ops));
- if (ops->use_ops)
- free_uses (&(ops->use_ops));
- if (ops->vuse_ops)
- free_vuses (&(ops->vuse_ops));
- if (ops->v_may_def_ops)
- free_v_may_defs (&(ops->v_may_def_ops));
- if (ops->v_must_def_ops)
- free_v_must_defs (&(ops->v_must_def_ops));
+ if (is_real_op)
+ {
+ /* The variable is a GIMPLE register. Add it to real operands. */
+ if (flags & opf_is_def)
+ append_def (var_p);
+ else
+ append_use (var_p);
+ }
+ else
+ add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false);
}
-/* Get the operands of statement STMT. Note that repeated calls to
- get_stmt_operands for the same statement will do nothing until the
- statement is marked modified by a call to modify_stmt(). */
+/* A subroutine of get_expr_operands to handle INDIRECT_REF,
+ ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.
-void
-get_stmt_operands (tree stmt)
-{
- stmt_ann_t ann;
- stmt_operands_t old_operands;
+ STMT is the statement being processed, EXPR is the INDIRECT_REF
+ that got us here.
+
+ FLAGS is as in get_expr_operands.
- /* The optimizers cannot handle statements that are nothing but a
- _DECL. This indicates a bug in the gimplifier. */
- gcc_assert (!SSA_VAR_P (stmt));
+ FULL_REF contains the full pointer dereference expression, if we
+ have it, or NULL otherwise.
- /* Ignore error statements. */
- if (TREE_CODE (stmt) == ERROR_MARK)
- return;
+ OFFSET and SIZE are the location of the access inside the
+ dereferenced pointer, if known.
- ann = get_stmt_ann (stmt);
+ RECURSE_ON_BASE should be set to true if we want to continue
+ calling get_expr_operands on the base pointer, and false if
+ something else will do it for us. */
- /* If the statement has not been modified, the operands are still valid. */
- if (!ann->modified)
- return;
+static void
+get_indirect_ref_operands (tree stmt, tree expr, int flags,
+ tree full_ref,
+ HOST_WIDE_INT offset, HOST_WIDE_INT size,
+ bool recurse_on_base)
+{
+ tree *pptr = &TREE_OPERAND (expr, 0);
+ tree ptr = *pptr;
+ stmt_ann_t s_ann = stmt_ann (stmt);
- timevar_push (TV_TREE_OPS);
+ /* Stores into INDIRECT_REF operands are never killing definitions. */
+ flags &= ~opf_kill_def;
- old_operands = ann->operands;
- memset (&(ann->operands), 0, sizeof (stmt_operands_t));
+ if (SSA_VAR_P (ptr))
+ {
+ struct ptr_info_def *pi = NULL;
- build_ssa_operands (stmt, ann, &old_operands, &(ann->operands));
- free_ssa_operands (&old_operands);
+ /* If PTR has flow-sensitive points-to information, use it. */
+ if (TREE_CODE (ptr) == SSA_NAME
+ && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
+ && pi->name_mem_tag)
+ {
+ /* PTR has its own memory tag. Use it. */
+ add_virtual_operand (pi->name_mem_tag, s_ann, flags,
+ full_ref, offset, size, false);
+ }
+ else
+ {
+ /* If PTR is not an SSA_NAME or it doesn't have a name
+ tag, use its symbol memory tag. */
+ var_ann_t v_ann;
- /* Clear the modified bit for STMT. Subsequent calls to
- get_stmt_operands for this statement will do nothing until the
- statement is marked modified by a call to modify_stmt(). */
- ann->modified = 0;
+ /* If we are emitting debugging dumps, display a warning if
+ PTR is an SSA_NAME with no flow-sensitive alias
+ information. That means that we may need to compute
+ aliasing again. */
+ if (dump_file
+ && TREE_CODE (ptr) == SSA_NAME
+ && pi == NULL)
+ {
+ fprintf (dump_file,
+ "NOTE: no flow-sensitive alias info for ");
+ print_generic_expr (dump_file, ptr, dump_flags);
+ fprintf (dump_file, " in ");
+ print_generic_stmt (dump_file, stmt, dump_flags);
+ }
- timevar_pop (TV_TREE_OPS);
+ if (TREE_CODE (ptr) == SSA_NAME)
+ ptr = SSA_NAME_VAR (ptr);
+ v_ann = var_ann (ptr);
+
+ if (v_ann->symbol_mem_tag)
+ add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
+ full_ref, offset, size, false);
+ }
+ }
+ else if (TREE_CODE (ptr) == INTEGER_CST)
+ {
+ /* If a constant is used as a pointer, we can't generate a real
+ operand for it but we mark the statement volatile to prevent
+ optimizations from messing things up. */
+ if (s_ann)
+ s_ann->has_volatile_ops = true;
+ return;
+ }
+ else
+ {
+ /* Ok, this isn't even is_gimple_min_invariant. Something's broken. */
+ gcc_unreachable ();
+ }
+
+ /* If requested, add a USE operand for the base pointer. */
+ if (recurse_on_base)
+ get_expr_operands (stmt, pptr, opf_none);
}
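+
+/* For illustration only: given a C store such as '*p_1 = x_2;', the
+ logic above adds a V_MAY_DEF for the name memory tag of p_1 when
+ flow-sensitive points-to information is available, falls back to
+ the symbol memory tag of 'p' otherwise, and (when RECURSE_ON_BASE
+ is true) finishes by adding a real USE operand for the base
+ pointer p_1 itself. */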
-/* Recursively scan the expression pointed by EXPR_P in statement referred to
- by INFO. FLAGS is one of the OPF_* constants modifying how to interpret the
- operands found. */
+/* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
static void
-get_expr_operands (tree stmt, tree *expr_p, int flags)
+get_tmr_operands (tree stmt, tree expr, int flags)
{
- enum tree_code code;
- enum tree_code_class class;
- tree expr = *expr_p;
+ tree tag = TMR_TAG (expr), ref;
+ HOST_WIDE_INT offset, size, maxsize;
+ subvar_t svars, sv;
stmt_ann_t s_ann = stmt_ann (stmt);
- if (expr == NULL || expr == error_mark_node)
- return;
+ /* First record the real operands. */
+ get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
+ get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);
- code = TREE_CODE (expr);
- class = TREE_CODE_CLASS (code);
+ /* Stores through a TARGET_MEM_REF are never killing definitions. */
+ flags &= ~opf_kill_def;
- switch (code)
+ if (TMR_SYMBOL (expr))
{
- case ADDR_EXPR:
- /* We could have the address of a component, array member,
- etc which has interesting variable references. */
- /* Taking the address of a variable does not represent a
- reference to it, but the fact that the stmt takes its address will be
- of interest to some passes (e.g. alias resolution). */
- add_stmt_operand (expr_p, s_ann, 0);
-
- /* If the address is invariant, there may be no interesting variable
- references inside. */
- if (is_gimple_min_invariant (expr))
- return;
-
- /* There should be no VUSEs created, since the referenced objects are
- not really accessed. The only operands that we should find here
- are ARRAY_REF indices which will always be real operands (GIMPLE
- does not allow non-registers as array indices). */
- flags |= opf_no_vops;
+ add_to_addressable_set (TMR_SYMBOL (expr), &s_ann->addresses_taken);
+ }
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
+ if (!tag)
+ {
+ /* Something went wrong; be conservative and mark the statement
+ as having volatile operands so that optimizers leave it alone. */
+ s_ann->has_volatile_ops = true;
return;
+ }
- case SSA_NAME:
- case VAR_DECL:
- case PARM_DECL:
- case RESULT_DECL:
- case CONST_DECL:
- /* If we found a variable, add it to DEFS or USES depending
- on the operand flags. */
- add_stmt_operand (expr_p, s_ann, flags);
+ if (DECL_P (tag))
+ {
+ get_expr_operands (stmt, &tag, flags);
return;
+ }
- case MISALIGNED_INDIRECT_REF:
- get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
- /* fall through */
+ ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
+ gcc_assert (ref != NULL_TREE);
+ svars = get_subvars_for_var (ref);
+ for (sv = svars; sv; sv = sv->next)
+ {
+ bool exact;
+ if (overlap_subvar (offset, maxsize, sv->var, &exact))
+ {
+ int subvar_flags = flags;
+ if (!exact || size != maxsize)
+ subvar_flags &= ~opf_kill_def;
+ add_stmt_operand (&sv->var, s_ann, subvar_flags);
+ }
+ }
+}
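+
+/* For illustration only: for a TARGET_MEM_REF produced by ivopts,
+ e.g. something dumped as MEM[symbol: a, index: i_3], the real
+ operands come from the base and index expressions, while the
+ virtual operand comes from the tag, or from its overlapping
+ subvars as computed above. */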
- case ALIGN_INDIRECT_REF:
- case INDIRECT_REF:
- get_indirect_ref_operands (stmt, expr, flags);
- return;
- case ARRAY_REF:
- case ARRAY_RANGE_REF:
- /* Treat array references as references to the virtual variable
- representing the array. The virtual variable for an ARRAY_REF
- is the VAR_DECL for the array. */
-
- /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
- according to the value of IS_DEF. Recurse if the LHS of the
- ARRAY_REF node is not a regular variable. */
- if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
- add_stmt_operand (expr_p, s_ann, flags);
- else
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
+/* Add clobbering definitions for .GLOBAL_VAR or for each of the
+ call-clobbered variables in the function. */
- get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
- return;
-
- case COMPONENT_REF:
- case REALPART_EXPR:
- case IMAGPART_EXPR:
- /* Similarly to arrays, references to compound variables (complex
- types and structures/unions) are globbed.
-
- FIXME: This means that
-
- a.x = 6;
- a.y = 7;
- foo (a.x, a.y);
-
- will not be constant propagated because the two partial
- definitions to 'a' will kill each other. Note that SRA may be
- able to fix this problem if 'a' can be scalarized. */
-
- /* If the LHS of the compound reference is not a regular variable,
- recurse to keep looking for more operands in the subexpression. */
- if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
- add_stmt_operand (expr_p, s_ann, flags);
- else
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
-
- if (code == COMPONENT_REF)
- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
- return;
-
- case WITH_SIZE_EXPR:
- /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
- and an rvalue reference to its second argument. */
- get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
- return;
-
- case CALL_EXPR:
- get_call_expr_operands (stmt, expr);
- return;
+static void
+add_call_clobber_ops (tree stmt, tree callee)
+{
+ unsigned u;
+ bitmap_iterator bi;
+ stmt_ann_t s_ann = stmt_ann (stmt);
+ bitmap not_read_b, not_written_b;
+
+ /* Calls to functions that are not const, pure or noreturn may
+ clobber call-clobbered variables. */
+ if (s_ann)
+ s_ann->makes_clobbering_call = true;
- case COND_EXPR:
- case VEC_COND_EXPR:
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
+ /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
+ for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
+ if (gimple_global_var (cfun))
+ {
+ tree var = gimple_global_var (cfun);
+ add_stmt_operand (&var, s_ann, opf_is_def);
return;
+ }
- case MODIFY_EXPR:
- {
- int subflags;
- tree op;
-
- get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
-
- op = TREE_OPERAND (expr, 0);
- if (TREE_CODE (op) == WITH_SIZE_EXPR)
- op = TREE_OPERAND (expr, 0);
- if (TREE_CODE (op) == ARRAY_REF
- || TREE_CODE (op) == ARRAY_RANGE_REF
- || TREE_CODE (op) == COMPONENT_REF
- || TREE_CODE (op) == REALPART_EXPR
- || TREE_CODE (op) == IMAGPART_EXPR)
- subflags = opf_is_def;
- else
- subflags = opf_is_def | opf_kill_def;
-
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags);
- return;
- }
+ /* Get info for local and module-level statics. There is a bit
+ set for each static if the call being processed does not read
+ or write that variable. */
+ not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
+ not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
+ /* Add a V_MAY_DEF operand for every call clobbered variable. */
+ EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
+ {
+ tree var = referenced_var_lookup (u);
+ unsigned int escape_mask = var_ann (var)->escape_mask;
+ tree real_var = var;
+ bool not_read;
+ bool not_written;
+
+ /* Not read and not written are computed on regular vars, not
+ subvars, so look at the parent var if this is an SFT. */
+ if (TREE_CODE (var) == STRUCT_FIELD_TAG)
+ real_var = SFT_PARENT_VAR (var);
+
+ not_read = not_read_b ? bitmap_bit_p (not_read_b,
+ DECL_UID (real_var)) : false;
+ not_written = not_written_b ? bitmap_bit_p (not_written_b,
+ DECL_UID (real_var)) : false;
+ gcc_assert (!unmodifiable_var_p (var));
+
+ clobber_stats.clobbered_vars++;
+
+ /* See if this variable is really clobbered by this function. */
+
+ /* Trivial case: things that escape only to pure/const calls are
+ not clobbered by non-pure/const calls, and are only read by
+ pure/const calls. */
+ if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
+ {
+ tree call = get_call_expr_in (stmt);
+ if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
+ {
+ add_stmt_operand (&var, s_ann, opf_none);
+ clobber_stats.unescapable_clobbers_avoided++;
+ continue;
+ }
+ else
+ {
+ clobber_stats.unescapable_clobbers_avoided++;
+ continue;
+ }
+ }
+
+ if (not_written)
+ {
+ clobber_stats.static_write_clobbers_avoided++;
+ if (!not_read)
+ add_stmt_operand (&var, s_ann, opf_none);
+ else
+ clobber_stats.static_read_clobbers_avoided++;
+ }
+ else
+ add_virtual_operand (var, s_ann, opf_is_def, NULL, 0, -1, true);
+ }
+}
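+
+/* For example: if ipa-reference proves that the callee never writes a
+ module-local static, the V_MAY_DEF that a call would normally add
+ for it is downgraded to a VUSE, or dropped entirely when the callee
+ does not read it either; the static_write/static_read clobber
+ statistics above count exactly these cases. */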
- case CONSTRUCTOR:
- {
- /* General aggregate CONSTRUCTORs have been decomposed, but they
- are still in use as the COMPLEX_EXPR equivalent for vectors. */
- tree t;
- for (t = TREE_OPERAND (expr, 0); t ; t = TREE_CHAIN (t))
- get_expr_operands (stmt, &TREE_VALUE (t), opf_none);
+/* Add VUSE operands for .GLOBAL_VAR or for all call-clobbered variables
+ in the function. */
- return;
- }
+static void
+add_call_read_ops (tree stmt, tree callee)
+{
+ unsigned u;
+ bitmap_iterator bi;
+ stmt_ann_t s_ann = stmt_ann (stmt);
+ bitmap not_read_b;
- case TRUTH_NOT_EXPR:
- case BIT_FIELD_REF:
- case VIEW_CONVERT_EXPR:
- do_unary:
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
+ /* If the function is not pure, it may reference memory. Add
+ a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
+ for the heuristic used to decide whether to create .GLOBAL_VAR. */
+ if (gimple_global_var (cfun))
+ {
+ tree var = gimple_global_var (cfun);
+ add_stmt_operand (&var, s_ann, opf_none);
return;
+ }
+
+ not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
- case TRUTH_AND_EXPR:
- case TRUTH_OR_EXPR:
- case TRUTH_XOR_EXPR:
- case COMPOUND_EXPR:
- case OBJ_TYPE_REF:
- do_binary:
- {
- tree op0 = TREE_OPERAND (expr, 0);
- tree op1 = TREE_OPERAND (expr, 1);
+ /* Add a VUSE for each call-clobbered variable. */
+ EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
+ {
+ tree var = referenced_var (u);
+ tree real_var = var;
+ bool not_read;
+
+ clobber_stats.readonly_clobbers++;
+
+ /* Not read and not written are computed on regular vars, not
+ subvars, so look at the parent var if this is an SFT. */
+
+ if (TREE_CODE (var) == STRUCT_FIELD_TAG)
+ real_var = SFT_PARENT_VAR (var);
+
+ not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
+ : false;
+
+ if (not_read)
+ {
+ clobber_stats.static_readonly_clobbers_avoided++;
+ continue;
+ }
+
+ add_stmt_operand (&var, s_ann, opf_none | opf_non_specific);
+ }
+}
- /* If it would be profitable to swap the operands, then do so to
- canonicalize the statement, enabling better optimization.
- By placing canonicalization of such expressions here we
- transparently keep statements in canonical form, even
- when the statement is modified. */
- if (tree_swap_operands_p (op0, op1, false))
- {
- /* For relationals we need to swap the operands
- and change the code. */
- if (code == LT_EXPR
- || code == GT_EXPR
- || code == LE_EXPR
- || code == GE_EXPR)
- {
- TREE_SET_CODE (expr, swap_tree_comparison (code));
- TREE_OPERAND (expr, 0) = op1;
- TREE_OPERAND (expr, 1) = op0;
- }
-
- /* For a commutative operator we can just swap the operands. */
- else if (commutative_tree_code (code))
- {
- TREE_OPERAND (expr, 0) = op1;
- TREE_OPERAND (expr, 1) = op0;
- }
- }
+/* A subroutine of get_expr_operands to handle CALL_EXPR. */
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
- return;
- }
+static void
+get_call_expr_operands (tree stmt, tree expr)
+{
+ tree op;
+ int call_flags = call_expr_flags (expr);
- case REALIGN_LOAD_EXPR:
- {
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
- return;
- }
+ /* If aliases have been computed already, add V_MAY_DEF or VUSE
+ operands for all the symbols that have been found to be
+ call-clobbered.
+
+ Note that if aliases have not been computed, the global effects
+ of calls will not be included in the SSA web. This is fine
+ because no optimizer should run before aliases have been
+ computed. By not bothering with virtual operands for CALL_EXPRs
+ we avoid adding superfluous virtual operands, which can be a
+ significant compile time sink (See PR 15855). */
+ if (gimple_aliases_computed_p (cfun)
+ && !bitmap_empty_p (gimple_call_clobbered_vars (cfun))
+ && !(call_flags & ECF_NOVOPS))
+ {
+ /* A 'pure' or a 'const' function never call-clobbers anything.
+ A 'noreturn' function might, but since we don't return anyway
+ there is no point in recording that. */
+ if (TREE_SIDE_EFFECTS (expr)
+ && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
+ add_call_clobber_ops (stmt, get_callee_fndecl (expr));
+ else if (!(call_flags & ECF_CONST))
+ add_call_read_ops (stmt, get_callee_fndecl (expr));
+ }
- case BLOCK:
- case FUNCTION_DECL:
- case EXC_PTR_EXPR:
- case FILTER_EXPR:
- case LABEL_DECL:
- /* Expressions that make no memory references. */
- return;
+ /* Scan the callee address expression for uses. */
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
- default:
- if (class == tcc_unary)
- goto do_unary;
- if (class == tcc_binary || class == tcc_comparison)
- goto do_binary;
- if (class == tcc_constant || class == tcc_type)
- return;
- }
+ for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
+ get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
- /* If we get here, something has gone wrong. */
-#ifdef ENABLE_CHECKING
- fprintf (stderr, "unhandled expression in get_expr_operands():\n");
- debug_tree (expr);
- fputs ("\n", stderr);
- internal_error ("internal error");
-#endif
- gcc_unreachable ();
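+
+ /* Scan the static chain, if any. */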
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
}
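+
+/* For illustration only: for a call like 'foo (a, b);' the scanner
+ adds real USE operands for 'a' and 'b' (plus the callee address),
+ and, once aliasing information is available, V_MAY_DEFs or VUSEs
+ for the call-clobbered symbols, depending on whether 'foo' may
+ write to memory or only read from it. */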
for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
{
- oconstraints[i] = constraint
- = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
- parse_output_constraint (&constraint, i, 0, 0,
- &allows_mem, &allows_reg, &is_inout);
+ constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
+ oconstraints[i] = constraint;
+ parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
+ &allows_reg, &is_inout);
/* This should have been split in gimplify_asm_expr. */
gcc_assert (!allows_reg || !is_inout);
if (!allows_reg && allows_mem)
{
tree t = get_base_address (TREE_VALUE (link));
- if (t && DECL_P (t))
- note_addressable (t, s_ann);
+ if (t && DECL_P (t) && s_ann)
+ add_to_addressable_set (t, &s_ann->addresses_taken);
}
get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
{
- constraint
- = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
+ constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
parse_input_constraint (&constraint, 0, 0, noutputs, 0,
- oconstraints, &allows_mem, &allows_reg);
+ oconstraints, &allows_mem, &allows_reg);
/* Memory operands are addressable. Note that STMT needs the
address of this operand. */
if (!allows_reg && allows_mem)
{
tree t = get_base_address (TREE_VALUE (link));
- if (t && DECL_P (t))
- note_addressable (t, s_ann);
+ if (t && DECL_P (t) && s_ann)
+ add_to_addressable_set (t, &s_ann->addresses_taken);
}
get_expr_operands (stmt, &TREE_VALUE (link), 0);
/* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
decided to group them). */
- if (global_var)
- add_stmt_operand (&global_var, s_ann, opf_is_def);
+ if (gimple_global_var (cfun))
+ {
+ tree var = gimple_global_var (cfun);
+ add_stmt_operand (&var, s_ann, opf_is_def);
+ }
else
- EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
- {
- tree var = referenced_var (i);
- add_stmt_operand (&var, s_ann, opf_is_def);
- }
+ EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
+ {
+ tree var = referenced_var (i);
+ add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
+ }
/* Now clobber all addressables. */
- EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
+ EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, i, bi)
{
tree var = referenced_var (i);
- add_stmt_operand (&var, s_ann, opf_is_def);
+
+ /* Subvars are explicitly represented in this list, so we
+ don't need to add the original variable to the clobber
+ operands. The original *will* appear in this list, though,
+ because we keep the addressability of the original variable
+ up to date to avoid confusing the rest of the backend. */
+ if (var_can_have_subvars (var)
+ && get_subvars_for_var (var) != NULL)
+ continue;
+
+ add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
}
break;
}
}
-/* A subroutine of get_expr_operands to handle INDIRECT_REF,
- ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF. */
+
+/* Scan operands for the assignment expression EXPR in statement STMT. */
static void
-get_indirect_ref_operands (tree stmt, tree expr, int flags)
+get_modify_stmt_operands (tree stmt, tree expr)
{
- tree *pptr = &TREE_OPERAND (expr, 0);
- tree ptr = *pptr;
- stmt_ann_t s_ann = stmt_ann (stmt);
+ /* First get operands from the RHS. */
+ get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 1), opf_none);
+
+ /* For the LHS, use a regular definition (OPF_IS_DEF) for GIMPLE
+ registers. If the LHS is a store to memory, we will either need
+ a preserving definition (V_MAY_DEF) or a killing definition
+ (V_MUST_DEF).
+
+ Preserving definitions are those that modify a part of an
+ aggregate object for which no subvars have been computed (or the
+ reference does not correspond exactly to one of them). Stores
+ through a pointer are also represented with V_MAY_DEF operators.
+
+ The determination of whether to use a preserving or a killing
+ definition is done while scanning the LHS of the assignment. By
+ default, assume that we will emit a V_MUST_DEF. */
+ get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 0),
+ opf_is_def | opf_kill_def);
+}
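+
+/* For illustration only, assuming no subvars have been computed:
+
+ x = y + 1; real DEF of x, real USE of y
+ a = b; V_MUST_DEF of a (the whole object is killed)
+ a.f = y; V_MAY_DEF of a (a partial store preserves the rest)
+ *p = y; V_MAY_DEF of p's memory tag (see
+ get_indirect_ref_operands) */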
- /* Stores into INDIRECT_REF operands are never killing definitions. */
- flags &= ~opf_kill_def;
- if (SSA_VAR_P (ptr))
- {
- struct ptr_info_def *pi = NULL;
+/* Recursively scan the expression pointed to by EXPR_P in statement
+ STMT. FLAGS is one of the OPF_* constants modifying how to
+ interpret the operands found. */
- /* If PTR has flow-sensitive points-to information, use it. */
- if (TREE_CODE (ptr) == SSA_NAME
- && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
- && pi->name_mem_tag)
- {
- /* PTR has its own memory tag. Use it. */
- add_stmt_operand (&pi->name_mem_tag, s_ann, flags);
- }
- else
- {
- /* If PTR is not an SSA_NAME or it doesn't have a name
- tag, use its type memory tag. */
- var_ann_t v_ann;
+static void
+get_expr_operands (tree stmt, tree *expr_p, int flags)
+{
+ enum tree_code code;
+ enum tree_code_class class;
+ tree expr = *expr_p;
+ stmt_ann_t s_ann = stmt_ann (stmt);
- /* If we are emitting debugging dumps, display a warning if
- PTR is an SSA_NAME with no flow-sensitive alias
- information. That means that we may need to compute
- aliasing again. */
- if (dump_file
- && TREE_CODE (ptr) == SSA_NAME
- && pi == NULL)
- {
- fprintf (dump_file,
- "NOTE: no flow-sensitive alias info for ");
- print_generic_expr (dump_file, ptr, dump_flags);
- fprintf (dump_file, " in ");
- print_generic_stmt (dump_file, stmt, dump_flags);
- }
+ if (expr == NULL)
+ return;
- if (TREE_CODE (ptr) == SSA_NAME)
- ptr = SSA_NAME_VAR (ptr);
- v_ann = var_ann (ptr);
- if (v_ann->type_mem_tag)
- add_stmt_operand (&v_ann->type_mem_tag, s_ann, flags);
- }
- }
+ code = TREE_CODE (expr);
+ class = TREE_CODE_CLASS (code);
- /* If a constant is used as a pointer, we can't generate a real
- operand for it but we mark the statement volatile to prevent
- optimizations from messing things up. */
- else if (TREE_CODE (ptr) == INTEGER_CST)
+ switch (code)
{
- if (s_ann)
- s_ann->has_volatile_ops = true;
- return;
- }
+ case ADDR_EXPR:
+ /* Taking the address of a variable does not represent a
+ reference to it, but the fact that the statement takes its
+ address will be of interest to some passes (e.g. alias
+ resolution). */
+ add_to_addressable_set (TREE_OPERAND (expr, 0), &s_ann->addresses_taken);
- /* Everything else *should* have been folded elsewhere, but users
- are smarter than we in finding ways to write invalid code. We
- cannot just abort here. If we were absolutely certain that we
- do handle all valid cases, then we could just do nothing here.
- That seems optimistic, so attempt to do something logical... */
- else if ((TREE_CODE (ptr) == PLUS_EXPR || TREE_CODE (ptr) == MINUS_EXPR)
- && TREE_CODE (TREE_OPERAND (ptr, 0)) == ADDR_EXPR
- && TREE_CODE (TREE_OPERAND (ptr, 1)) == INTEGER_CST)
- {
- /* Make sure we know the object is addressable. */
- pptr = &TREE_OPERAND (ptr, 0);
- add_stmt_operand (pptr, s_ann, 0);
+ /* If the address is invariant, there may be no interesting
+ variable references inside. */
+ if (is_gimple_min_invariant (expr))
+ return;
- /* Mark the object itself with a VUSE. */
- pptr = &TREE_OPERAND (*pptr, 0);
- get_expr_operands (stmt, pptr, flags);
+ /* Otherwise, there may be variables referenced inside but there
+ should be no VUSEs created, since the referenced objects are
+ not really accessed. The only operands that we should find
+ here are ARRAY_REF indices which will always be real operands
+ (GIMPLE does not allow non-registers as array indices). */
+ flags |= opf_no_vops;
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
return;
- }
- /* Ok, this isn't even is_gimple_min_invariant. Something's broke. */
- else
- gcc_unreachable ();
+ case SSA_NAME:
+ case STRUCT_FIELD_TAG:
+ case SYMBOL_MEMORY_TAG:
+ case NAME_MEMORY_TAG:
+ add_stmt_operand (expr_p, s_ann, flags);
+ return;
- /* Add a USE operand for the base pointer. */
- get_expr_operands (stmt, pptr, opf_none);
-}
+ case VAR_DECL:
+ case PARM_DECL:
+ case RESULT_DECL:
+ {
+ subvar_t svars;
+
+ /* Add the subvars for a variable, if it has subvars, to DEFS
+ or USES. Otherwise, add the variable itself. Whether it
+ goes to USES or DEFS depends on the operand flags. */
+ if (var_can_have_subvars (expr)
+ && (svars = get_subvars_for_var (expr)))
+ {
+ subvar_t sv;
+ for (sv = svars; sv; sv = sv->next)
+ add_stmt_operand (&sv->var, s_ann, flags);
+ }
+ else
+ add_stmt_operand (expr_p, s_ann, flags);
-/* A subroutine of get_expr_operands to handle CALL_EXPR. */
+ return;
+ }
-static void
-get_call_expr_operands (tree stmt, tree expr)
-{
- tree op;
- int call_flags = call_expr_flags (expr);
-
- /* If aliases have been computed already, add V_MAY_DEF or V_USE
- operands for all the symbols that have been found to be
- call-clobbered.
-
- Note that if aliases have not been computed, the global effects
- of calls will not be included in the SSA web. This is fine
- because no optimizer should run before aliases have been
- computed. By not bothering with virtual operands for CALL_EXPRs
- we avoid adding superfluous virtual operands, which can be a
- significant compile time sink (See PR 15855). */
- if (aliases_computed_p && !bitmap_empty_p (call_clobbered_vars))
- {
- /* A 'pure' or a 'const' functions never call clobber anything.
- A 'noreturn' function might, but since we don't return anyway
- there is no point in recording that. */
- if (TREE_SIDE_EFFECTS (expr)
- && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
- add_call_clobber_ops (stmt);
- else if (!(call_flags & ECF_CONST))
- add_call_read_ops (stmt);
- }
+ case MISALIGNED_INDIRECT_REF:
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
+ /* fall through */
- /* Find uses in the called function. */
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
+ case ALIGN_INDIRECT_REF:
+ case INDIRECT_REF:
+ get_indirect_ref_operands (stmt, expr, flags, NULL_TREE, 0, -1, true);
+ return;
- for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
- get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
+ case TARGET_MEM_REF:
+ get_tmr_operands (stmt, expr, flags);
+ return;
- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
+ case ARRAY_REF:
+ case ARRAY_RANGE_REF:
+ case COMPONENT_REF:
+ case REALPART_EXPR:
+ case IMAGPART_EXPR:
+ {
+ tree ref;
+ HOST_WIDE_INT offset, size, maxsize;
+ bool none = true;
+
+ /* This component reference becomes an access to all of the
+ subvariables it can touch, if we can determine that, but
+ *NOT* to the variable itself. If we can't determine which
+ fields we could touch, the recursion will eventually get to
+ a variable and add *all* of its subvars, or whatever is the
+ minimum correct subset. */
+ ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
+ if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
+ {
+ subvar_t sv;
+ subvar_t svars = get_subvars_for_var (ref);
-}
+ for (sv = svars; sv; sv = sv->next)
+ {
+ bool exact;
+
+ if (overlap_subvar (offset, maxsize, sv->var, &exact))
+ {
+ int subvar_flags = flags;
+ none = false;
+ if (!exact || size != maxsize)
+ subvar_flags &= ~opf_kill_def;
+ add_stmt_operand (&sv->var, s_ann, subvar_flags);
+ }
+ }
+ if (!none)
+ flags |= opf_no_vops;
+ }
+ else if (TREE_CODE (ref) == INDIRECT_REF)
+ {
+ get_indirect_ref_operands (stmt, ref, flags, expr, offset,
+ maxsize, false);
+ flags |= opf_no_vops;
+ }
-/* Add *VAR_P to the appropriate operand array for INFO. FLAGS is as in
- get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
- the statement's real operands, otherwise it is added to virtual
- operands. */
+ /* Even if we found subvars above, we still need to recurse into
+ the base expression so that uses such as 'd' in 's.a[d]' are
+ seen; if 's.a' has subvars we would miss them otherwise. */
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
+ flags & ~opf_kill_def);
+
+ if (code == COMPONENT_REF)
+ {
+ if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
+ s_ann->has_volatile_ops = true;
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
+ }
+ else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
+ {
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
+ }
-static void
-add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
-{
- bool is_real_op;
- tree var, sym;
- var_ann_t v_ann;
+ return;
+ }
- var = *var_p;
- STRIP_NOPS (var);
+ case WITH_SIZE_EXPR:
+ /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
+ and an rvalue reference to its second argument. */
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
+ return;
- /* If the operand is an ADDR_EXPR, add its operand to the list of
- variables that have had their address taken in this statement. */
- if (TREE_CODE (var) == ADDR_EXPR)
- {
- note_addressable (TREE_OPERAND (var, 0), s_ann);
+ case CALL_EXPR:
+ get_call_expr_operands (stmt, expr);
return;
- }
- /* If the original variable is not a scalar, it will be added to the list
- of virtual operands. In that case, use its base symbol as the virtual
- variable representing it. */
- is_real_op = is_gimple_reg (var);
- if (!is_real_op && !DECL_P (var))
- var = get_virtual_var (var);
+ case COND_EXPR:
+ case VEC_COND_EXPR:
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
+ return;
- /* If VAR is not a variable that we care to optimize, do nothing. */
- if (var == NULL_TREE || !SSA_VAR_P (var))
- return;
+ case GIMPLE_MODIFY_STMT:
+ get_modify_stmt_operands (stmt, expr);
+ return;
- sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
- v_ann = var_ann (sym);
+ case CONSTRUCTOR:
+ {
+ /* General aggregate CONSTRUCTORs have been decomposed, but they
+ are still in use as the COMPLEX_EXPR equivalent for vectors. */
+ constructor_elt *ce;
+ unsigned HOST_WIDE_INT idx;
- /* Mark statements with volatile operands. Optimizers should back
- off from statements having volatile operands. */
- if (TREE_THIS_VOLATILE (sym) && s_ann)
- s_ann->has_volatile_ops = true;
+ for (idx = 0;
+ VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
+ idx++)
+ get_expr_operands (stmt, &ce->value, opf_none);
- if (is_real_op)
- {
- /* The variable is a GIMPLE register. Add it to real operands. */
- if (flags & opf_is_def)
- append_def (var_p);
- else
- append_use (var_p);
- }
- else
- {
- varray_type aliases;
-
- /* The variable is not a GIMPLE register. Add it (or its aliases) to
- virtual operands, unless the caller has specifically requested
- not to add virtual operands (used when adding operands inside an
- ADDR_EXPR expression). */
- if (flags & opf_no_vops)
return;
+ }
- aliases = v_ann->may_aliases;
+ case BIT_FIELD_REF:
+ /* Stores using BIT_FIELD_REF are always preserving definitions. */
+ flags &= ~opf_kill_def;
- if (aliases == NULL)
- {
- /* The variable is not aliased or it is an alias tag. */
- if (flags & opf_is_def)
- {
- if (flags & opf_kill_def)
- {
- /* Only regular variables may get a V_MUST_DEF
- operand. */
- gcc_assert (v_ann->mem_tag_kind == NOT_A_TAG);
- /* V_MUST_DEF for non-aliased, non-GIMPLE register
- variable definitions. */
- append_v_must_def (var);
- }
- else
- {
- /* Add a V_MAY_DEF for call-clobbered variables and
- memory tags. */
- append_v_may_def (var);
- }
- }
- else
- {
- append_vuse (var);
- if (s_ann && v_ann->is_alias_tag)
- s_ann->makes_aliased_loads = 1;
- }
- }
- else
- {
- size_t i;
+ /* Fallthru */
- /* The variable is aliased. Add its aliases to the virtual
- operands. */
- gcc_assert (VARRAY_ACTIVE_SIZE (aliases) != 0);
+ case TRUTH_NOT_EXPR:
+ case VIEW_CONVERT_EXPR:
+ do_unary:
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
+ return;
- if (flags & opf_is_def)
- {
- /* If the variable is also an alias tag, add a virtual
- operand for it, otherwise we will miss representing
- references to the members of the variable's alias set.
- This fixes the bug in gcc.c-torture/execute/20020503-1.c. */
- if (v_ann->is_alias_tag)
- append_v_may_def (var);
-
- for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
- append_v_may_def (VARRAY_TREE (aliases, i));
-
- if (s_ann)
- s_ann->makes_aliased_stores = 1;
- }
- else
- {
- /* Similarly, append a virtual uses for VAR itself, when
- it is an alias tag. */
- if (v_ann->is_alias_tag)
- append_vuse (var);
+ case TRUTH_AND_EXPR:
+ case TRUTH_OR_EXPR:
+ case TRUTH_XOR_EXPR:
+ case COMPOUND_EXPR:
+ case OBJ_TYPE_REF:
+ case ASSERT_EXPR:
+ do_binary:
+ {
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
+ return;
+ }
- for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
- append_vuse (VARRAY_TREE (aliases, i));
+ case DOT_PROD_EXPR:
+ case REALIGN_LOAD_EXPR:
+ {
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
+ return;
+ }
- if (s_ann)
- s_ann->makes_aliased_loads = 1;
- }
- }
+ case BLOCK:
+ case FUNCTION_DECL:
+ case EXC_PTR_EXPR:
+ case FILTER_EXPR:
+ case LABEL_DECL:
+ case CONST_DECL:
+ case OMP_PARALLEL:
+ case OMP_SECTIONS:
+ case OMP_FOR:
+ case OMP_SINGLE:
+ case OMP_MASTER:
+ case OMP_ORDERED:
+ case OMP_CRITICAL:
+ case OMP_RETURN:
+ case OMP_CONTINUE:
+ /* Expressions that make no memory references. */
+ return;
+
+ default:
+ if (class == tcc_unary)
+ goto do_unary;
+ if (class == tcc_binary || class == tcc_comparison)
+ goto do_binary;
+ if (class == tcc_constant || class == tcc_type)
+ return;
}
+
+ /* If we get here, something has gone wrong. */
+#ifdef ENABLE_CHECKING
+ fprintf (stderr, "unhandled expression in get_expr_operands():\n");
+ debug_tree (expr);
+ fputs ("\n", stderr);
+#endif
+ gcc_unreachable ();
}
-/* Record that VAR had its address taken in the statement with annotations
- S_ANN. */
+/* Parse STMT looking for operands. When finished, the various
+ build_* operand vectors will have potential operands in them. */
static void
-note_addressable (tree var, stmt_ann_t s_ann)
+parse_ssa_operands (tree stmt)
{
- if (!s_ann)
- return;
+ enum tree_code code;
- var = get_base_address (var);
- if (var && SSA_VAR_P (var))
+ code = TREE_CODE (stmt);
+ switch (code)
{
- if (s_ann->addresses_taken == NULL)
- s_ann->addresses_taken = BITMAP_GGC_ALLOC ();
- bitmap_set_bit (s_ann->addresses_taken, var_ann (var)->uid);
+ case GIMPLE_MODIFY_STMT:
+ get_modify_stmt_operands (stmt, stmt);
+ break;
+
+ case COND_EXPR:
+ get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
+ break;
+
+ case SWITCH_EXPR:
+ get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
+ break;
+
+ case ASM_EXPR:
+ get_asm_expr_operands (stmt);
+ break;
+
+ case RETURN_EXPR:
+ get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
+ break;
+
+ case GOTO_EXPR:
+ get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
+ break;
+
+ case LABEL_EXPR:
+ get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
+ break;
+
+ case BIND_EXPR:
+ case CASE_LABEL_EXPR:
+ case TRY_CATCH_EXPR:
+ case TRY_FINALLY_EXPR:
+ case EH_FILTER_EXPR:
+ case CATCH_EXPR:
+ case RESX_EXPR:
+ /* These nodes contain no variable references. */
+ break;
+
+ default:
+ /* Notice that if get_expr_operands tries to use &STMT as the
+ operand pointer (which may only happen for USE operands), we
+ will fail in add_stmt_operand. This default will handle
+ statements like empty statements, or CALL_EXPRs that may
+ appear on the RHS of a statement or as statements themselves. */
+ get_expr_operands (stmt, &stmt, opf_none);
+ break;
}
}
-/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
- clobbered variables in the function. */
+/* Create an operands cache for STMT. */
static void
-add_call_clobber_ops (tree stmt)
+build_ssa_operands (tree stmt)
{
- unsigned i;
- tree t;
- bitmap_iterator bi;
- stmt_ann_t s_ann = stmt_ann (stmt);
- struct stmt_ann_d empty_ann;
+ stmt_ann_t ann = get_stmt_ann (stmt);
+
+ /* Initially assume that the statement has no volatile operands. */
+ if (ann)
+ ann->has_volatile_ops = false;
- /* Functions that are not const, pure or never return may clobber
- call-clobbered variables. */
- if (s_ann)
- s_ann->makes_clobbering_call = true;
+ start_ssa_stmt_operands ();
- /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
- for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
- if (global_var)
- {
- add_stmt_operand (&global_var, s_ann, opf_is_def);
- return;
- }
+ parse_ssa_operands (stmt);
+ operand_build_sort_virtual (build_vuses);
+ operand_build_sort_virtual (build_v_may_defs);
+ operand_build_sort_virtual (build_v_must_defs);
- /* If cache is valid, copy the elements into the build vectors. */
- if (ssa_call_clobbered_cache_valid)
- {
- for (i = 0; i < VARRAY_ACTIVE_SIZE (clobbered_vuses); i++)
- {
- t = VARRAY_TREE (clobbered_vuses, i);
- gcc_assert (TREE_CODE (t) != SSA_NAME);
- var_ann (t)->in_vuse_list = 1;
- VARRAY_PUSH_TREE (build_vuses, t);
- }
- for (i = 0; i < VARRAY_ACTIVE_SIZE (clobbered_v_may_defs); i++)
- {
- t = VARRAY_TREE (clobbered_v_may_defs, i);
- gcc_assert (TREE_CODE (t) != SSA_NAME);
- var_ann (t)->in_v_may_def_list = 1;
- VARRAY_PUSH_TREE (build_v_may_defs, t);
- }
- if (s_ann)
- {
- s_ann->makes_aliased_loads = clobbered_aliased_loads;
- s_ann->makes_aliased_stores = clobbered_aliased_stores;
- }
- return;
- }
+ finalize_ssa_stmt_operands (stmt);
+}
- memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
- /* Add a V_MAY_DEF operand for every call clobbered variable. */
- EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
- {
- tree var = referenced_var (i);
- if (TREE_READONLY (var)
- && (TREE_STATIC (var) || DECL_EXTERNAL (var)))
- add_stmt_operand (&var, &empty_ann, opf_none);
- else
- add_stmt_operand (&var, &empty_ann, opf_is_def);
- }
+/* Free any operand vectors in OPS. */
- clobbered_aliased_loads = empty_ann.makes_aliased_loads;
- clobbered_aliased_stores = empty_ann.makes_aliased_stores;
+void
+free_ssa_operands (stmt_operands_p ops)
+{
+ ops->def_ops = NULL;
+ ops->use_ops = NULL;
+ ops->maydef_ops = NULL;
+ ops->mustdef_ops = NULL;
+ ops->vuse_ops = NULL;
+}
- /* Set the flags for a stmt's annotation. */
- if (s_ann)
+
+/* Recompute the operand cache for statement STMT. */
+
+void
+update_stmt_operands (tree stmt)
+{
+ stmt_ann_t ann = get_stmt_ann (stmt);
+
+ /* If update_stmt_operands is called before SSA is initialized, do
+ nothing. */
+ if (!ssa_operands_active ())
+ return;
+
+ /* The optimizers cannot handle statements that are nothing but a
+ _DECL. This indicates a bug in the gimplifier. */
+ gcc_assert (!SSA_VAR_P (stmt));
+
+ gcc_assert (ann->modified);
+
+ timevar_push (TV_TREE_OPS);
+
+ build_ssa_operands (stmt);
+
+ /* Clear the modified bit for STMT. */
+ ann->modified = 0;
+
+ timevar_pop (TV_TREE_OPS);
+}
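+
+/* A minimal usage sketch (hypothetical caller, compiled out): a pass
+ that rewrites part of a statement in place must flag the statement
+ as modified before asking for the operand cache to be rebuilt.
+ This assumes the usual tree-flow helpers mark_stmt_modified and
+ GIMPLE_STMT_OPERAND; example_replace_rhs is not part of this file. */
+
+#if 0
+static void
+example_replace_rhs (tree stmt, tree new_rhs)
+{
+ GIMPLE_STMT_OPERAND (stmt, 1) = new_rhs; /* Rewrite the RHS. */
+ mark_stmt_modified (stmt); /* Invalidate the operand cache. */
+ update_stmt_operands (stmt); /* Rebuild DEF/USE and virtual operands. */
+}
+#endif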
+
+
+/* Copies virtual operands from SRC to DEST. */
+
+void
+copy_virtual_operands (tree dest, tree src)
+{
+ tree t;
+ ssa_op_iter iter, old_iter;
+ use_operand_p use_p, u2;
+ def_operand_p def_p, d2;
+
+ build_ssa_operands (dest);
+
+ /* Copy all the virtual fields. */
+ FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
+ append_vuse (t);
+ FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
+ append_v_may_def (t);
+ FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
+ append_v_must_def (t);
+
+ if (VEC_length (tree, build_vuses) == 0
+ && VEC_length (tree, build_v_may_defs) == 0
+ && VEC_length (tree, build_v_must_defs) == 0)
+ return;
+
+ /* Now commit the virtual operands to this stmt. */
+ finalize_ssa_v_must_defs (dest);
+ finalize_ssa_v_may_defs (dest);
+ finalize_ssa_vuses (dest);
+
+ /* Finally, set the fields to the same values as the originals. */
+ t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
+ FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
{
- s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
- s_ann->makes_aliased_stores = empty_ann.makes_aliased_stores;
+ gcc_assert (!op_iter_done (&old_iter));
+ SET_USE (use_p, t);
+ t = op_iter_next_tree (&old_iter);
}
+ gcc_assert (op_iter_done (&old_iter));
- /* Prepare empty cache vectors. */
- if (clobbered_v_may_defs)
+ op_iter_init_maydef (&old_iter, src, &u2, &d2);
+ FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
{
- VARRAY_POP_ALL (clobbered_vuses);
- VARRAY_POP_ALL (clobbered_v_may_defs);
+ gcc_assert (!op_iter_done (&old_iter));
+ SET_USE (use_p, USE_FROM_PTR (u2));
+ SET_DEF (def_p, DEF_FROM_PTR (d2));
+ op_iter_next_maymustdef (&u2, &d2, &old_iter);
}
- else
+ gcc_assert (op_iter_done (&old_iter));
+
+ op_iter_init_mustdef (&old_iter, src, &u2, &d2);
+ FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
{
- VARRAY_TREE_INIT (clobbered_v_may_defs, 10, "clobbered_v_may_defs");
- VARRAY_TREE_INIT (clobbered_vuses, 10, "clobbered_vuses");
+ gcc_assert (!op_iter_done (&old_iter));
+ SET_USE (use_p, USE_FROM_PTR (u2));
+ SET_DEF (def_p, DEF_FROM_PTR (d2));
+ op_iter_next_maymustdef (&u2, &d2, &old_iter);
}
+ gcc_assert (op_iter_done (&old_iter));
- /* Now fill the clobbered cache with the values that have been found. */
- for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
- VARRAY_PUSH_TREE (clobbered_vuses, VARRAY_TREE (build_vuses, i));
- for (i = 0; i < VARRAY_ACTIVE_SIZE (build_v_may_defs); i++)
- VARRAY_PUSH_TREE (clobbered_v_may_defs, VARRAY_TREE (build_v_may_defs, i));
-
- ssa_call_clobbered_cache_valid = true;
}
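+
+/* For example, a pass that inserts a copy of a load elsewhere in the
+ CFG can call copy_virtual_operands (new_stmt, orig_stmt) so that
+ the copy carries exactly the same VUSE/V_MAY_DEF/V_MUST_DEF SSA
+ names as the original statement. */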
-/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
- function. */
+/* Specifically for use in DOM's expression analysis. Given a store, we
+ create an artificial statement that looks like a load from the store;
+ this can be used to eliminate redundant loads. OLD_STMT is the store
+ statement, and NEW_STMT is the new load, which represents a load of
+ the values stored. */
-static void
-add_call_read_ops (tree stmt)
+void
+create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
{
- unsigned i;
- tree t;
- bitmap_iterator bi;
- stmt_ann_t s_ann = stmt_ann (stmt);
- struct stmt_ann_d empty_ann;
+ stmt_ann_t ann;
+ tree op;
+ ssa_op_iter iter;
+ use_operand_p use_p;
+ unsigned x;
- /* if the function is not pure, it may reference memory. Add
- a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
- for the heuristic used to decide whether to create .GLOBAL_VAR. */
- if (global_var)
+ ann = get_stmt_ann (new_stmt);
+
+ /* Process the stmt looking for operands. */
+ start_ssa_stmt_operands ();
+ parse_ssa_operands (new_stmt);
+
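+ /* Clear the in_vuse_list and in_v_may_def_list markers that parsing
+ set; the virtual operands collected here are discarded below
+ without being finalized, so the flags must not be left behind. */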
+ for (x = 0; x < VEC_length (tree, build_vuses); x++)
{
- add_stmt_operand (&global_var, s_ann, opf_none);
- return;
+ tree t = VEC_index (tree, build_vuses, x);
+ if (TREE_CODE (t) != SSA_NAME)
+ {
+ var_ann_t ann = var_ann (t);
+ ann->in_vuse_list = 0;
+ }
}
-
- /* If cache is valid, copy the elements into the build vector. */
- if (ssa_ro_call_cache_valid)
+
+ for (x = 0; x < VEC_length (tree, build_v_may_defs); x++)
{
- for (i = 0; i < VARRAY_ACTIVE_SIZE (ro_call_vuses); i++)
+ tree t = VEC_index (tree, build_v_may_defs, x);
+ if (TREE_CODE (t) != SSA_NAME)
{
- t = VARRAY_TREE (ro_call_vuses, i);
- gcc_assert (TREE_CODE (t) != SSA_NAME);
- var_ann (t)->in_vuse_list = 1;
- VARRAY_PUSH_TREE (build_vuses, t);
+ var_ann_t ann = var_ann (t);
+ ann->in_v_may_def_list = 0;
}
- if (s_ann)
- s_ann->makes_aliased_loads = ro_call_aliased_loads;
- return;
}
- memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
+ /* Remove any virtual operands that were found. */
+ VEC_truncate (tree, build_v_may_defs, 0);
+ VEC_truncate (tree, build_v_must_defs, 0);
+ VEC_truncate (tree, build_vuses, 0);
- /* Add a VUSE for each call-clobbered variable. */
- EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
+ /* For each VDEF on the original statement, we want to create a
+ VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
+ statement. */
+ FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
+ (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
+ append_vuse (op);
+
+ /* Now build the operands for this new stmt. */
+ finalize_ssa_stmt_operands (new_stmt);
+
+ /* All uses in this fake stmt must not be in the immediate use lists. */
+ FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
+ delink_imm_use (use_p);
+}
+
+
+/* Swap operands EXP0 and EXP1 in statement STMT. No attempt is made
+ to check the validity of the swap operation. */
+
+void
+swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
+{
+ tree op0, op1;
+ op0 = *exp0;
+ op1 = *exp1;
+
+ /* If the operand cache is active, attempt to preserve the relative
+ positions of these two operands in their respective immediate use
+ lists. */
+ if (ssa_operands_active () && op0 != op1)
{
- tree var = referenced_var (i);
- add_stmt_operand (&var, &empty_ann, opf_none);
- }
+ use_optype_p use0, use1, ptr;
+ use0 = use1 = NULL;
- ro_call_aliased_loads = empty_ann.makes_aliased_loads;
- if (s_ann)
- s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
+ /* Find the 2 operands in the cache, if they are there. */
+ for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
+ if (USE_OP_PTR (ptr)->use == exp0)
+ {
+ use0 = ptr;
+ break;
+ }
- /* Prepare empty cache vectors. */
- if (ro_call_vuses)
- VARRAY_POP_ALL (ro_call_vuses);
- else
- VARRAY_TREE_INIT (ro_call_vuses, 10, "ro_call_vuses");
+ for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
+ if (USE_OP_PTR (ptr)->use == exp1)
+ {
+ use1 = ptr;
+ break;
+ }
- /* Now fill the clobbered cache with the values that have been found. */
- for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
- VARRAY_PUSH_TREE (ro_call_vuses, VARRAY_TREE (build_vuses, i));
+ /* If one or both uses have no operand entry, there isn't much we
+ can do at this point; presumably we don't need to worry about it. */
+ if (use0 && use1)
+ {
+ tree *tmp = USE_OP_PTR (use1)->use;
+ USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
+ USE_OP_PTR (use0)->use = tmp;
+ }
+ }
- ssa_ro_call_cache_valid = true;
+ /* Now swap the data. */
+ *exp0 = op1;
+ *exp1 = op0;
}
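+
+/* A minimal usage sketch (hypothetical caller, compiled out):
+ canonicalizing a commutative RHS without invalidating the
+ immediate-use lists. tree_swap_operands_p and commutative_tree_code
+ are the predicates that this file's old canonicalization code used;
+ example_canonicalize is not part of this file. */
+
+#if 0
+static void
+example_canonicalize (tree stmt)
+{
+ tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
+ if (commutative_tree_code (TREE_CODE (rhs))
+ && tree_swap_operands_p (TREE_OPERAND (rhs, 0),
+ TREE_OPERAND (rhs, 1), false))
+ swap_tree_operands (stmt, &TREE_OPERAND (rhs, 0),
+ &TREE_OPERAND (rhs, 1));
+}
+#endif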
-/* Copies virtual operands from SRC to DST. */
+
+/* Add the base address of REF to the set *ADDRESSES_TAKEN. If
+ *ADDRESSES_TAKEN is NULL, a new set is created. REF may be
+ a single variable whose address has been taken or any other valid
+ GIMPLE memory reference (structure reference, array, etc). If the
+ base address of REF is a decl that has sub-variables, also add all
+ of its sub-variables. */
void
-copy_virtual_operands (tree dst, tree src)
+add_to_addressable_set (tree ref, bitmap *addresses_taken)
{
- unsigned i;
- vuse_optype vuses = STMT_VUSE_OPS (src);
- v_may_def_optype v_may_defs = STMT_V_MAY_DEF_OPS (src);
- v_must_def_optype v_must_defs = STMT_V_MUST_DEF_OPS (src);
- vuse_optype *vuses_new = &stmt_ann (dst)->operands.vuse_ops;
- v_may_def_optype *v_may_defs_new = &stmt_ann (dst)->operands.v_may_def_ops;
- v_must_def_optype *v_must_defs_new = &stmt_ann (dst)->operands.v_must_def_ops;
-
- if (vuses)
- {
- *vuses_new = allocate_vuse_optype (NUM_VUSES (vuses));
- for (i = 0; i < NUM_VUSES (vuses); i++)
- SET_VUSE_OP (*vuses_new, i, VUSE_OP (vuses, i));
- }
+ tree var;
+ subvar_t svars;
+
+ gcc_assert (addresses_taken);
- if (v_may_defs)
+ /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
+ as the only thing we take the address of. If VAR is a structure,
+ taking the address of a field means that the whole structure may
+ be referenced using pointer arithmetic. See PR 21407 and the
+ ensuing mailing list discussion. */
+ var = get_base_address (ref);
+ if (var && SSA_VAR_P (var))
{
- *v_may_defs_new = allocate_v_may_def_optype (NUM_V_MAY_DEFS (v_may_defs));
- for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
+ if (*addresses_taken == NULL)
+ *addresses_taken = BITMAP_GGC_ALLOC ();
+
+ if (var_can_have_subvars (var)
+ && (svars = get_subvars_for_var (var)))
+ {
+ subvar_t sv;
+ for (sv = svars; sv; sv = sv->next)
+ {
+ bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
+ TREE_ADDRESSABLE (sv->var) = 1;
+ }
+ }
+ else
{
- SET_V_MAY_DEF_OP (*v_may_defs_new, i, V_MAY_DEF_OP (v_may_defs, i));
- SET_V_MAY_DEF_RESULT (*v_may_defs_new, i,
- V_MAY_DEF_RESULT (v_may_defs, i));
+ bitmap_set_bit (*addresses_taken, DECL_UID (var));
+ TREE_ADDRESSABLE (var) = 1;
}
}
+}
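+
+/* For example, for the C statement 'p = &a.f;' the base address is
+ the whole structure 'a', so 'a' (and all of its sub-variables, if
+ any) is marked addressable, not just the field 'f'. */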
- if (v_must_defs)
+
+/* Scan the immediate_use list for VAR, making sure it is linked properly.
+ Return TRUE if there is a problem, and emit an error message to F. */
+
+bool
+verify_imm_links (FILE *f, tree var)
+{
+ use_operand_p ptr, prev, list;
+ int count;
+
+ gcc_assert (TREE_CODE (var) == SSA_NAME);
+
+ list = &(SSA_NAME_IMM_USE_NODE (var));
+ gcc_assert (list->use == NULL);
+
+ if (list->prev == NULL)
{
- *v_must_defs_new = allocate_v_must_def_optype (NUM_V_MUST_DEFS (v_must_defs));
- for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
- {
- SET_V_MUST_DEF_RESULT (*v_must_defs_new, i, V_MUST_DEF_RESULT (v_must_defs, i));
- SET_V_MUST_DEF_KILL (*v_must_defs_new, i, V_MUST_DEF_KILL (v_must_defs, i));
- }
+ gcc_assert (list->next == NULL);
+ return false;
}
+
+ prev = list;
+ count = 0;
+ for (ptr = list->next; ptr != list; )
+ {
+ if (prev != ptr->prev)
+ goto error;
+
+ if (ptr->use == NULL)
+ goto error; /* 2 roots, or SAFE guard node. */
+ else if (*(ptr->use) != var)
+ goto error;
+
+ prev = ptr;
+ ptr = ptr->next;
+
+ /* Avoid infinite loops. 50,000,000 uses probably indicates a
+ problem. */
+ if (count++ > 50000000)
+ goto error;
+ }
+
+ /* Verify list in the other direction. */
+ prev = list;
+ for (ptr = list->prev; ptr != list; )
+ {
+ if (prev != ptr->next)
+ goto error;
+ prev = ptr;
+ ptr = ptr->prev;
+ if (count-- < 0)
+ goto error;
+ }
+
+ if (count != 0)
+ goto error;
+
+ return false;
+
+ error:
+ if (ptr->stmt && stmt_modified_p (ptr->stmt))
+ {
+ fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
+ print_generic_stmt (f, ptr->stmt, TDF_SLIM);
+ }
+ fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
+ (void *)ptr->use);
+ print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
+ fprintf (f, "\n");
+ return true;
}
-/* Specifically for use in DOM's expression analysis. Given a store, we
- create an artificial stmt which looks like a load from the store, this can
- be used to eliminate redundant loads. OLD_OPS are the operands from the
- store stmt, and NEW_STMT is the new load which represents a load of the
- values stored. */
+/* Dump all the immediate uses of VAR to FILE. */
void
-create_ssa_artficial_load_stmt (stmt_operands_p old_ops, tree new_stmt)
+dump_immediate_uses_for (FILE *file, tree var)
{
- stmt_ann_t ann;
- tree op;
- stmt_operands_t tmp;
- unsigned j;
+ imm_use_iterator iter;
+ use_operand_p use_p;
- memset (&tmp, 0, sizeof (stmt_operands_t));
- ann = get_stmt_ann (new_stmt);
+ gcc_assert (var && TREE_CODE (var) == SSA_NAME);
- /* Free operands just in case is was an existing stmt. */
- free_ssa_operands (&(ann->operands));
+ print_generic_expr (file, var, TDF_SLIM);
+ fprintf (file, " : -->");
+ if (has_zero_uses (var))
+ fprintf (file, " no uses.\n");
+ else if (has_single_use (var))
+ fprintf (file, " single use.\n");
+ else
+ fprintf (file, "%d uses.\n", num_imm_uses (var));
- build_ssa_operands (new_stmt, NULL, &tmp, &(ann->operands));
- free_vuses (&(ann->operands.vuse_ops));
- free_v_may_defs (&(ann->operands.v_may_def_ops));
- free_v_must_defs (&(ann->operands.v_must_def_ops));
-
- /* For each VDEF on the original statement, we want to create a
- VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
- statement. */
- for (j = 0; j < NUM_V_MAY_DEFS (old_ops->v_may_def_ops); j++)
+ FOR_EACH_IMM_USE_FAST (use_p, iter, var)
{
- op = V_MAY_DEF_RESULT (old_ops->v_may_def_ops, j);
- append_vuse (op);
+ if (use_p->stmt == NULL && use_p->use == NULL)
+ fprintf (file, "***end of stmt iterator marker***\n");
+ else if (!is_gimple_reg (USE_FROM_PTR (use_p)))
+ print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
+ else
+ print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
}
-
- for (j = 0; j < NUM_V_MUST_DEFS (old_ops->v_must_def_ops); j++)
+ fprintf (file, "\n");
+}
+
+
+/* Dump all the immediate uses to FILE. */
+
+void
+dump_immediate_uses (FILE *file)
+{
+ tree var;
+ unsigned int x;
+
+ fprintf (file, "Immediate_uses: \n\n");
+ for (x = 1; x < num_ssa_names; x++)
{
- op = V_MUST_DEF_RESULT (old_ops->v_must_def_ops, j);
- append_vuse (op);
+ var = ssa_name (x);
+ if (!var)
+ continue;
+ dump_immediate_uses_for (file, var);
}
+}
- /* Now set the vuses for this new stmt. */
- ann->operands.vuse_ops = finalize_ssa_vuses (&(tmp.vuse_ops));
+
+/* Dump def-use edges on stderr. */
+
+void
+debug_immediate_uses (void)
+{
+ dump_immediate_uses (stderr);
}
-#include "gt-tree-ssa-operands.h"
+
+/* Dump def-use edges for VAR on stderr. */
+
+void
+debug_immediate_uses_for (tree var)
+{
+ dump_immediate_uses_for (stderr, var);
+}