/* Operand is a "non-specific" kill for call-clobbers and such. This
is used to distinguish "reset the world" events from explicit
- MODIFY_EXPRs. */
+ GIMPLE_MODIFY_STMTs. */
#define opf_non_specific (1 << 3)
/* Array for building all the def operands. */
/* Array for building all the V_MUST_DEF operands. */
static VEC(tree,heap) *build_v_must_defs;
-/* These arrays are the cached operand vectors for call clobbered calls. */
-static bool ops_active = false;
-
-static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
-static unsigned operand_memory_index;
-
static void get_expr_operands (tree, tree *, int);
-static def_optype_p free_defs = NULL;
-static use_optype_p free_uses = NULL;
-static vuse_optype_p free_vuses = NULL;
-static maydef_optype_p free_maydefs = NULL;
-static mustdef_optype_p free_mustdefs = NULL;
-
+/* Number of functions with initialized ssa_operands. */
+static int n_initialized = 0;
+
+/* Allocates operand OP of given TYPE from the appropriate free list,
+   or allocates a new one if the list is empty.  */
+
+#define ALLOC_OPTYPE(OP, TYPE) \
+ do \
+ { \
+ TYPE##_optype_p ret \
+ = gimple_ssa_operands (cfun)->free_##TYPE##s; \
+ if (ret) \
+ gimple_ssa_operands (cfun)->free_##TYPE##s \
+ = ret->next; \
+ else \
+ ret = ssa_operand_alloc (sizeof (*ret)); \
+ (OP) = ret; \
+ } while (0)
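
/* A minimal standalone sketch of the freelist pattern ALLOC_OPTYPE
   implements; the node type and function names below are illustrative,
   not part of this file.  */

#include <stdlib.h>

struct node { struct node *next; int payload; };

static struct node *free_nodes;		/* per-type freelist head */

static struct node *
node_alloc (void)
{
  struct node *ret = free_nodes;
  if (ret)
    free_nodes = ret->next;		/* pop a recycled node */
  else
    ret = malloc (sizeof (*ret));	/* freelist empty: fresh memory */
  return ret;
}

static void
node_free (struct node *n)
{
  n->next = free_nodes;			/* push back for later reuse */
  free_nodes = n;
}
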
/* Return the DECL_UID of the base variable of T. */
bool
ssa_operands_active (void)
{
- return ops_active;
+ return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
}
void
init_ssa_operands (void)
{
- build_defs = VEC_alloc (tree, heap, 5);
- build_uses = VEC_alloc (tree, heap, 10);
- build_vuses = VEC_alloc (tree, heap, 25);
- build_v_may_defs = VEC_alloc (tree, heap, 25);
- build_v_must_defs = VEC_alloc (tree, heap, 25);
-
- gcc_assert (operand_memory == NULL);
- operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
- ops_active = true;
+ if (!n_initialized++)
+ {
+ build_defs = VEC_alloc (tree, heap, 5);
+ build_uses = VEC_alloc (tree, heap, 10);
+ build_vuses = VEC_alloc (tree, heap, 25);
+ build_v_may_defs = VEC_alloc (tree, heap, 25);
+ build_v_must_defs = VEC_alloc (tree, heap, 25);
+ }
+
+ gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
+ gimple_ssa_operands (cfun)->operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
+ gimple_ssa_operands (cfun)->ops_active = true;
memset (&clobber_stats, 0, sizeof (clobber_stats));
}
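
/* init_ssa_operands reference-counts the shared build vectors: only the
   first initialization allocates them and, in fini_ssa_operands below,
   only the last teardown frees them, while the per-function state in
   gimple_ssa_operands (cfun) is reset unconditionally.  A minimal
   sketch of the refcounting idiom, with illustrative names:  */

#include <stdlib.h>

static int n_users;		/* how many clients are initialized */
static int *shared_buf;		/* allocated once, freed once */

static void
shared_init (void)
{
  if (!n_users++)		/* first client allocates */
    shared_buf = calloc (16, sizeof (int));
}

static void
shared_fini (void)
{
  if (!--n_users)		/* last client frees */
    {
      free (shared_buf);
      shared_buf = NULL;
    }
}
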
fini_ssa_operands (void)
{
struct ssa_operand_memory_d *ptr;
- VEC_free (tree, heap, build_defs);
- VEC_free (tree, heap, build_uses);
- VEC_free (tree, heap, build_v_must_defs);
- VEC_free (tree, heap, build_v_may_defs);
- VEC_free (tree, heap, build_vuses);
- free_defs = NULL;
- free_uses = NULL;
- free_vuses = NULL;
- free_maydefs = NULL;
- free_mustdefs = NULL;
- while ((ptr = operand_memory) != NULL)
+ if (!--n_initialized)
{
- operand_memory = operand_memory->next;
+ VEC_free (tree, heap, build_defs);
+ VEC_free (tree, heap, build_uses);
+ VEC_free (tree, heap, build_v_must_defs);
+ VEC_free (tree, heap, build_v_may_defs);
+ VEC_free (tree, heap, build_vuses);
+ }
+ gimple_ssa_operands (cfun)->free_defs = NULL;
+ gimple_ssa_operands (cfun)->free_uses = NULL;
+ gimple_ssa_operands (cfun)->free_vuses = NULL;
+ gimple_ssa_operands (cfun)->free_maydefs = NULL;
+ gimple_ssa_operands (cfun)->free_mustdefs = NULL;
+ while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
+ {
+ gimple_ssa_operands (cfun)->operand_memory
+ = gimple_ssa_operands (cfun)->operand_memory->next;
ggc_free (ptr);
}
- ops_active = false;
+ gimple_ssa_operands (cfun)->ops_active = false;
if (dump_file && (dump_flags & TDF_STATS))
{
ssa_operand_alloc (unsigned size)
{
char *ptr;
- if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
+ if (gimple_ssa_operands (cfun)->operand_memory_index + size
+ >= SSA_OPERAND_MEMORY_SIZE)
{
struct ssa_operand_memory_d *ptr;
ptr = GGC_NEW (struct ssa_operand_memory_d);
- ptr->next = operand_memory;
- operand_memory = ptr;
- operand_memory_index = 0;
+ ptr->next = gimple_ssa_operands (cfun)->operand_memory;
+ gimple_ssa_operands (cfun)->operand_memory = ptr;
+ gimple_ssa_operands (cfun)->operand_memory_index = 0;
}
- ptr = &(operand_memory->mem[operand_memory_index]);
- operand_memory_index += size;
+ ptr = &(gimple_ssa_operands (cfun)->operand_memory
+ ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
+ gimple_ssa_operands (cfun)->operand_memory_index += size;
return ptr;
}
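
/* ssa_operand_alloc is a bump allocator: operand nodes are carved out
   of fixed-size chunks, and full chunks are chained so that
   fini_ssa_operands can free them in bulk.  A standalone sketch of the
   same scheme (names illustrative; alignment is ignored here, and
   malloc stands in for the GC allocator):  */

#include <stdlib.h>

#define CHUNK_SIZE 8192

struct chunk { struct chunk *next; char mem[CHUNK_SIZE]; };

static struct chunk *chunks;		   /* all chunks, for bulk freeing */
static unsigned chunk_index = CHUNK_SIZE;  /* forces a chunk on first use */

static void *
bump_alloc (unsigned size)	/* assumes SIZE is well below CHUNK_SIZE */
{
  void *ptr;
  if (chunk_index + size >= CHUNK_SIZE)
    {
      struct chunk *c = malloc (sizeof (struct chunk));
      c->next = chunks;			/* chain the new chunk */
      chunks = c;
      chunk_index = 0;
    }
  ptr = &chunks->mem[chunk_index];	/* hand out the next slice... */
  chunk_index += size;			/* ...and bump past it */
  return ptr;
}
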
-/* Make sure PTR is in the correct immediate use list. Since uses are simply
- pointers into the stmt TREE, there is no way of telling if anyone has
- changed what this pointer points to via TREE_OPERANDS (exp, 0) = <...>.
- The contents are different, but the pointer is still the same. This
- routine will check to make sure PTR is in the correct list, and if it isn't
- put it in the correct list. We cannot simply check the previous node
- because all nodes in the same stmt might have be changed. */
+
+/* This routine makes sure that PTR is in an immediate use list, and makes
+ sure the stmt pointer is set to the current stmt. */
static inline void
-correct_use_link (use_operand_p ptr, tree stmt)
+set_virtual_use_link (use_operand_p ptr, tree stmt)
{
- use_operand_p prev;
- tree root;
-
/* fold_stmt may have changed the stmt pointers. */
if (ptr->stmt != stmt)
ptr->stmt = stmt;
- prev = ptr->prev;
- if (prev)
- {
- /* Find the root element, making sure we skip any safe iterators. */
- while (prev->use != NULL || prev->stmt == NULL)
- prev = prev->prev;
+ /* If this use isn't in a list, add it to the correct list. */
+ if (!ptr->prev)
+ link_imm_use (ptr, *(ptr->use));
+}
- /* Get the SSA_NAME of the list the node is in. */
- root = prev->stmt;
+/* Appends ELT after TO, and moves the TO pointer to ELT. */
+
+#define APPEND_OP_AFTER(ELT, TO) \
+ do \
+ { \
+ (TO)->next = (ELT); \
+ (TO) = (ELT); \
+ } while (0)
+
+/* Appends the head of list FROM after TO, and moves both pointers
+   to their successors.  */
+
+#define MOVE_HEAD_AFTER(FROM, TO) \
+ do \
+ { \
+ APPEND_OP_AFTER (FROM, TO); \
+ (FROM) = (FROM)->next; \
+ } while (0)
+
+/* Moves OP to the appropriate freelist.  OP is set to its successor.  */
+
+#define MOVE_HEAD_TO_FREELIST(OP, TYPE) \
+ do \
+ { \
+ TYPE##_optype_p next = (OP)->next; \
+ (OP)->next \
+ = gimple_ssa_operands (cfun)->free_##TYPE##s; \
+ gimple_ssa_operands (cfun)->free_##TYPE##s = (OP);\
+ (OP) = next; \
+ } while (0)
+
+/* Initializes immediate use at USE_PTR to value VAL, and links it to the list
+ of immediate uses. STMT is the current statement. */
+
+#define INITIALIZE_USE(USE_PTR, VAL, STMT) \
+ do \
+ { \
+ (USE_PTR)->use = (VAL); \
+ link_imm_use_stmt ((USE_PTR), *(VAL), (STMT)); \
+ } while (0)
+
+/* Adds OP to the list of defs after LAST, and moves
+ LAST to the new element. */
- /* If it's the right list, simply return. */
- if (root == *(ptr->use))
- return;
- }
+static inline void
+add_def_op (tree *op, def_optype_p *last)
+{
+ def_optype_p new;
- /* It is in the wrong list if we reach here. */
- delink_imm_use (ptr);
- link_imm_use (ptr, *(ptr->use));
+ ALLOC_OPTYPE (new, def);
+ DEF_OP_PTR (new) = op;
+ APPEND_OP_AFTER (new, *last);
}
+/* Adds OP to the list of uses of statement STMT after LAST, and moves
+ LAST to the new element. */
-/* This routine makes sure that PTR is in an immediate use list, and makes
- sure the stmt pointer is set to the current stmt. Virtual uses do not need
- the overhead of correct_use_link since they cannot be directly manipulated
- like a real use can be. (They don't exist in the TREE_OPERAND nodes.) */
+static inline void
+add_use_op (tree stmt, tree *op, use_optype_p *last)
+{
+ use_optype_p new;
+
+ ALLOC_OPTYPE (new, use);
+ INITIALIZE_USE (USE_OP_PTR (new), op, stmt);
+ APPEND_OP_AFTER (new, *last);
+}
+
+/* Adds OP to the list of vuses of statement STMT after LAST, and moves
+ LAST to the new element. */
static inline void
-set_virtual_use_link (use_operand_p ptr, tree stmt)
+add_vuse_op (tree stmt, tree op, vuse_optype_p *last)
{
- /* fold_stmt may have changed the stmt pointers. */
- if (ptr->stmt != stmt)
- ptr->stmt = stmt;
+ vuse_optype_p new;
- /* If this use isn't in a list, add it to the correct list. */
- if (!ptr->prev)
- link_imm_use (ptr, *(ptr->use));
+ ALLOC_OPTYPE (new, vuse);
+ VUSE_OP (new) = op;
+ INITIALIZE_USE (VUSE_OP_PTR (new), &VUSE_OP (new), stmt);
+ APPEND_OP_AFTER (new, *last);
}
+/* Adds OP to the list of maydefs of statement STMT after LAST, and moves
+ LAST to the new element. */
+
+static inline void
+add_maydef_op (tree stmt, tree op, maydef_optype_p *last)
+{
+ maydef_optype_p new;
+
+ ALLOC_OPTYPE (new, maydef);
+ MAYDEF_RESULT (new) = op;
+ MAYDEF_OP (new) = op;
+ INITIALIZE_USE (MAYDEF_OP_PTR (new), &MAYDEF_OP (new), stmt);
+ APPEND_OP_AFTER (new, *last);
+}
+
+/* Adds OP to the list of mustdefs of statement STMT after LAST, and moves
+ LAST to the new element. */
+
+static inline void
+add_mustdef_op (tree stmt, tree op, mustdef_optype_p *last)
+{
+ mustdef_optype_p new;
+
+ ALLOC_OPTYPE (new, mustdef);
+ MUSTDEF_RESULT (new) = op;
+ MUSTDEF_KILL (new) = op;
+ INITIALIZE_USE (MUSTDEF_KILL_PTR (new), &MUSTDEF_KILL (new), stmt);
+ APPEND_OP_AFTER (new, *last);
+}
+
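
/* Each add_*_op routine above appends through a LAST pointer that
   starts at a dummy head node (the local new_list in the finalize_*
   routines below); the finished list is then new_list.next, which
   avoids special-casing the first element.  A standalone sketch of the
   idiom, with illustrative names:  */

#include <stdio.h>
#include <stdlib.h>

struct op { struct op *next; int val; };

static void
add_op (int val, struct op **last)
{
  struct op *n = malloc (sizeof (*n));
  n->val = val;
  (*last)->next = n;		/* APPEND_OP_AFTER: link after *LAST... */
  *last = n;			/* ...and advance LAST to the new node */
}

int
main (void)
{
  struct op head, *last = &head, *p;	/* dummy head node */
  int i;

  for (i = 1; i <= 3; i++)
    add_op (i, &last);
  last->next = NULL;		/* terminate, as the finalizers do */

  for (p = head.next; p; p = p->next)	/* real list starts at head.next */
    printf ("%d\n", p->val);
  return 0;
}
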
+/* Takes elements from build_defs and turns them into def operands of STMT.
+   TODO -- Given that the def operands list is not necessarily sorted, merging
+ the operands this way does not make much sense.
+ -- Make build_defs VEC of tree *. */
+
+static inline void
+finalize_ssa_def_ops (tree stmt)
+{
+ unsigned new_i;
+ struct def_optype_d new_list;
+ def_optype_p old_ops, last;
+ tree *old_base;
+
+ new_list.next = NULL;
+ last = &new_list;
-#define FINALIZE_OPBUILD build_defs
-#define FINALIZE_OPBUILD_BASE(I) (tree *)VEC_index (tree, \
- build_defs, (I))
-#define FINALIZE_OPBUILD_ELEM(I) (tree *)VEC_index (tree, \
- build_defs, (I))
-#define FINALIZE_FUNC finalize_ssa_def_ops
-#define FINALIZE_ALLOC alloc_def
-#define FINALIZE_FREE free_defs
-#define FINALIZE_TYPE struct def_optype_d
-#define FINALIZE_ELEM(PTR) ((PTR)->def_ptr)
-#define FINALIZE_OPS DEF_OPS
-#define FINALIZE_BASE(VAR) VAR
-#define FINALIZE_BASE_TYPE tree *
-#define FINALIZE_BASE_ZERO NULL
-#define FINALIZE_INITIALIZE(PTR, VAL, STMT) FINALIZE_ELEM (PTR) = (VAL)
-#include "tree-ssa-opfinalize.h"
+ old_ops = DEF_OPS (stmt);
+ new_i = 0;
+ while (old_ops && new_i < VEC_length (tree, build_defs))
+ {
+ tree *new_base = (tree *) VEC_index (tree, build_defs, new_i);
+ old_base = DEF_OP_PTR (old_ops);
+
+ if (old_base == new_base)
+ {
+	  /* If variables are the same, reuse this node.  */
+ MOVE_HEAD_AFTER (old_ops, last);
+ new_i++;
+ }
+ else if (old_base < new_base)
+ {
+	  /* If old is less than new, old goes to the free list.  */
+ MOVE_HEAD_TO_FREELIST (old_ops, def);
+ }
+ else
+ {
+ /* This is a new operand. */
+ add_def_op (new_base, &last);
+ new_i++;
+ }
+ }
+
+ /* If there is anything remaining in the build_defs list, simply emit it. */
+ for ( ; new_i < VEC_length (tree, build_defs); new_i++)
+ add_def_op ((tree *) VEC_index (tree, build_defs, new_i), &last);
+
+ last->next = NULL;
+
+ /* If there is anything in the old list, free it. */
+ if (old_ops)
+ {
+ old_ops->next = gimple_ssa_operands (cfun)->free_defs;
+ gimple_ssa_operands (cfun)->free_defs = old_ops;
+ }
+
+ /* Now set the stmt's operands. */
+ DEF_OPS (stmt) = new_list.next;
+
+#ifdef ENABLE_CHECKING
+ {
+ def_optype_p ptr;
+ unsigned x = 0;
+ for (ptr = DEF_OPS (stmt); ptr; ptr = ptr->next)
+ x++;
+
+ gcc_assert (x == VEC_length (tree, build_defs));
+ }
+#endif
+}
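
/* finalize_ssa_def_ops and the maydef/vuse/mustdef finalizers below all
   run the same two-finger merge: walk the old (sorted) operand list and
   the new (sorted) build vector together, reusing nodes whose key
   matches, recycling stale nodes onto the freelist, and allocating only
   for genuinely new operands.  A standalone sketch on plain ints, with
   illustrative names (GCC splices the leftover old tail onto the
   freelist in one step; this sketch recycles it node by node):  */

#include <stdlib.h>

struct op { struct op *next; int key; };

static struct op *free_ops;	/* freelist of recycled nodes */

static struct op *
op_alloc (int key)
{
  struct op *n;
  if (free_ops)
    {
      n = free_ops;		/* reuse a recycled node if possible */
      free_ops = n->next;
    }
  else
    n = malloc (sizeof (*n));
  n->key = key;
  return n;
}

static struct op *
merge_ops (struct op *old, const int *new_keys, unsigned n)
{
  struct op head, *last = &head;
  unsigned i = 0;

  while (old && i < n)
    {
      if (old->key == new_keys[i])
	{			/* same key: reuse the old node */
	  last->next = old;
	  last = old;
	  old = old->next;
	  i++;
	}
      else if (old->key < new_keys[i])
	{			/* stale node: recycle it */
	  struct op *next = old->next;
	  old->next = free_ops;
	  free_ops = old;
	  old = next;
	}
      else
	{			/* genuinely new operand */
	  last->next = op_alloc (new_keys[i++]);
	  last = last->next;
	}
    }

  for (; i < n; i++)		/* emit whatever remains in the build list */
    {
      last->next = op_alloc (new_keys[i]);
      last = last->next;
    }
  last->next = NULL;

  while (old)			/* recycle any leftover old nodes */
    {
      struct op *next = old->next;
      old->next = free_ops;
      free_ops = old;
      old = next;
    }
  return head.next;
}
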
/* This routine will create stmt operands for STMT from the def build list. */
unsigned int num = VEC_length (tree, build_defs);
/* There should only be a single real definition per assignment. */
- gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);
+ gcc_assert ((stmt && TREE_CODE (stmt) != GIMPLE_MODIFY_STMT) || num <= 1);
/* If there is an old list, often the new list is identical, or close, so
find the elements at the beginning that are the same as the vector. */
VEC_truncate (tree, build_defs, 0);
}
-#define FINALIZE_OPBUILD build_uses
-#define FINALIZE_OPBUILD_BASE(I) (tree *)VEC_index (tree, \
- build_uses, (I))
-#define FINALIZE_OPBUILD_ELEM(I) (tree *)VEC_index (tree, \
- build_uses, (I))
-#define FINALIZE_FUNC finalize_ssa_use_ops
-#define FINALIZE_ALLOC alloc_use
-#define FINALIZE_FREE free_uses
-#define FINALIZE_TYPE struct use_optype_d
-#define FINALIZE_ELEM(PTR) ((PTR)->use_ptr.use)
-#define FINALIZE_OPS USE_OPS
-#define FINALIZE_USE_PTR(PTR) USE_OP_PTR (PTR)
-#define FINALIZE_CORRECT_USE correct_use_link
-#define FINALIZE_BASE(VAR) VAR
-#define FINALIZE_BASE_TYPE tree *
-#define FINALIZE_BASE_ZERO NULL
-#define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
- (PTR)->use_ptr.use = (VAL); \
- link_imm_use_stmt (&((PTR)->use_ptr), \
- *(VAL), (STMT))
-#include "tree-ssa-opfinalize.h"
+/* Takes elements from build_uses and turns them into use operands of STMT.
+ TODO -- Make build_uses VEC of tree *. */
+
+static inline void
+finalize_ssa_use_ops (tree stmt)
+{
+ unsigned new_i;
+ struct use_optype_d new_list;
+ use_optype_p old_ops, ptr, last;
+
+ new_list.next = NULL;
+ last = &new_list;
+
+ old_ops = USE_OPS (stmt);
+
+ /* If there is anything in the old list, free it. */
+ if (old_ops)
+ {
+ for (ptr = old_ops; ptr; ptr = ptr->next)
+ delink_imm_use (USE_OP_PTR (ptr));
+ old_ops->next = gimple_ssa_operands (cfun)->free_uses;
+ gimple_ssa_operands (cfun)->free_uses = old_ops;
+ }
+
+  /* Now create nodes for all the new uses.  */
+ for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
+ add_use_op (stmt, (tree *) VEC_index (tree, build_uses, new_i), &last);
+
+ last->next = NULL;
+
+ /* Now set the stmt's operands. */
+ USE_OPS (stmt) = new_list.next;
+
+#ifdef ENABLE_CHECKING
+ {
+ unsigned x = 0;
+ for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
+ x++;
+
+ gcc_assert (x == VEC_length (tree, build_uses));
+ }
+#endif
+}
/* Return a new use operand vector for STMT, comparing to OLD_OPS_P. */
finalize_ssa_use_ops (stmt);
VEC_truncate (tree, build_uses, 0);
}
-
-
-/* Return a new V_MAY_DEF operand vector for STMT, comparing to OLD_OPS_P. */
-#define FINALIZE_OPBUILD build_v_may_defs
-#define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_v_may_defs, (I))
-#define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
- build_v_may_defs, (I)))
-#define FINALIZE_FUNC finalize_ssa_v_may_def_ops
-#define FINALIZE_ALLOC alloc_maydef
-#define FINALIZE_FREE free_maydefs
-#define FINALIZE_TYPE struct maydef_optype_d
-#define FINALIZE_ELEM(PTR) MAYDEF_RESULT (PTR)
-#define FINALIZE_OPS MAYDEF_OPS
-#define FINALIZE_USE_PTR(PTR) MAYDEF_OP_PTR (PTR)
-#define FINALIZE_CORRECT_USE set_virtual_use_link
-#define FINALIZE_BASE_ZERO 0
-#define FINALIZE_BASE(VAR) get_name_decl (VAR)
-#define FINALIZE_BASE_TYPE unsigned
-#define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
- (PTR)->def_var = (VAL); \
- (PTR)->use_var = (VAL); \
- (PTR)->use_ptr.use = &((PTR)->use_var); \
- link_imm_use_stmt (&((PTR)->use_ptr), \
- (VAL), (STMT))
-#include "tree-ssa-opfinalize.h"
-
-
+
+
+/* Takes elements from build_v_may_defs and turns them into maydef operands of
+ STMT. */
+
+static inline void
+finalize_ssa_v_may_def_ops (tree stmt)
+{
+ unsigned new_i;
+ struct maydef_optype_d new_list;
+ maydef_optype_p old_ops, ptr, last;
+ tree act;
+ unsigned old_base, new_base;
+
+ new_list.next = NULL;
+ last = &new_list;
+
+ old_ops = MAYDEF_OPS (stmt);
+
+ new_i = 0;
+ while (old_ops && new_i < VEC_length (tree, build_v_may_defs))
+ {
+ act = VEC_index (tree, build_v_may_defs, new_i);
+ new_base = get_name_decl (act);
+ old_base = get_name_decl (MAYDEF_OP (old_ops));
+
+ if (old_base == new_base)
+ {
+	  /* If variables are the same, reuse this node.  */
+ MOVE_HEAD_AFTER (old_ops, last);
+ set_virtual_use_link (MAYDEF_OP_PTR (last), stmt);
+ new_i++;
+ }
+ else if (old_base < new_base)
+ {
+	  /* If old is less than new, old goes to the free list.  */
+ delink_imm_use (MAYDEF_OP_PTR (old_ops));
+ MOVE_HEAD_TO_FREELIST (old_ops, maydef);
+ }
+ else
+ {
+ /* This is a new operand. */
+ add_maydef_op (stmt, act, &last);
+ new_i++;
+ }
+ }
+
+  /* If there is anything remaining in the build_v_may_defs list,
+     simply emit it.  */
+ for ( ; new_i < VEC_length (tree, build_v_may_defs); new_i++)
+ add_maydef_op (stmt, VEC_index (tree, build_v_may_defs, new_i), &last);
+
+ last->next = NULL;
+
+ /* If there is anything in the old list, free it. */
+ if (old_ops)
+ {
+ for (ptr = old_ops; ptr; ptr = ptr->next)
+ delink_imm_use (MAYDEF_OP_PTR (ptr));
+ old_ops->next = gimple_ssa_operands (cfun)->free_maydefs;
+ gimple_ssa_operands (cfun)->free_maydefs = old_ops;
+ }
+
+ /* Now set the stmt's operands. */
+ MAYDEF_OPS (stmt) = new_list.next;
+
+#ifdef ENABLE_CHECKING
+ {
+ unsigned x = 0;
+ for (ptr = MAYDEF_OPS (stmt); ptr; ptr = ptr->next)
+ x++;
+
+ gcc_assert (x == VEC_length (tree, build_v_may_defs));
+ }
+#endif
+}
+
static void
finalize_ssa_v_may_defs (tree stmt)
{
VEC_truncate (tree, build_v_may_defs, 0);
}
-
-#define FINALIZE_OPBUILD build_vuses
-#define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_vuses, (I))
-#define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
- build_vuses, (I)))
-#define FINALIZE_FUNC finalize_ssa_vuse_ops
-#define FINALIZE_ALLOC alloc_vuse
-#define FINALIZE_FREE free_vuses
-#define FINALIZE_TYPE struct vuse_optype_d
-#define FINALIZE_ELEM(PTR) VUSE_OP (PTR)
-#define FINALIZE_OPS VUSE_OPS
-#define FINALIZE_USE_PTR(PTR) VUSE_OP_PTR (PTR)
-#define FINALIZE_CORRECT_USE set_virtual_use_link
-#define FINALIZE_BASE_ZERO 0
-#define FINALIZE_BASE(VAR) get_name_decl (VAR)
-#define FINALIZE_BASE_TYPE unsigned
-#define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
- (PTR)->use_var = (VAL); \
- (PTR)->use_ptr.use = &((PTR)->use_var); \
- link_imm_use_stmt (&((PTR)->use_ptr), \
- (VAL), (STMT))
-#include "tree-ssa-opfinalize.h"
+/* Takes elements from build_vuses and turns them into vuse operands of
+ STMT. */
+
+static inline void
+finalize_ssa_vuse_ops (tree stmt)
+{
+ unsigned new_i;
+ struct vuse_optype_d new_list;
+ vuse_optype_p old_ops, ptr, last;
+ tree act;
+ unsigned old_base, new_base;
+
+ new_list.next = NULL;
+ last = &new_list;
+
+ old_ops = VUSE_OPS (stmt);
+
+ new_i = 0;
+ while (old_ops && new_i < VEC_length (tree, build_vuses))
+ {
+ act = VEC_index (tree, build_vuses, new_i);
+ new_base = get_name_decl (act);
+ old_base = get_name_decl (VUSE_OP (old_ops));
+
+ if (old_base == new_base)
+ {
+	  /* If variables are the same, reuse this node.  */
+ MOVE_HEAD_AFTER (old_ops, last);
+ set_virtual_use_link (VUSE_OP_PTR (last), stmt);
+ new_i++;
+ }
+ else if (old_base < new_base)
+ {
+	  /* If old is less than new, old goes to the free list.  */
+	  delink_imm_use (VUSE_OP_PTR (old_ops));
+ MOVE_HEAD_TO_FREELIST (old_ops, vuse);
+ }
+ else
+ {
+ /* This is a new operand. */
+ add_vuse_op (stmt, act, &last);
+ new_i++;
+ }
+ }
+
+ /* If there is anything remaining in the build_vuses list, simply emit it. */
+ for ( ; new_i < VEC_length (tree, build_vuses); new_i++)
+ add_vuse_op (stmt, VEC_index (tree, build_vuses, new_i), &last);
+
+ last->next = NULL;
+
+ /* If there is anything in the old list, free it. */
+ if (old_ops)
+ {
+ for (ptr = old_ops; ptr; ptr = ptr->next)
+ delink_imm_use (VUSE_OP_PTR (ptr));
+ old_ops->next = gimple_ssa_operands (cfun)->free_vuses;
+ gimple_ssa_operands (cfun)->free_vuses = old_ops;
+ }
+
+ /* Now set the stmt's operands. */
+ VUSE_OPS (stmt) = new_list.next;
+#ifdef ENABLE_CHECKING
+ {
+ unsigned x = 0;
+ for (ptr = VUSE_OPS (stmt); ptr; ptr = ptr->next)
+ x++;
+
+ gcc_assert (x == VEC_length (tree, build_vuses));
+ }
+#endif
+}
+
/* Return a new VUSE operand vector, comparing to OLD_OPS_P. */
static void
VEC_truncate (tree, build_vuses, 0);
}
-
-/* Return a new V_MUST_DEF operand vector for STMT, comparing to OLD_OPS_P. */
-
-#define FINALIZE_OPBUILD build_v_must_defs
-#define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_v_must_defs, (I))
-#define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
- build_v_must_defs, (I)))
-#define FINALIZE_FUNC finalize_ssa_v_must_def_ops
-#define FINALIZE_ALLOC alloc_mustdef
-#define FINALIZE_FREE free_mustdefs
-#define FINALIZE_TYPE struct mustdef_optype_d
-#define FINALIZE_ELEM(PTR) MUSTDEF_RESULT (PTR)
-#define FINALIZE_OPS MUSTDEF_OPS
-#define FINALIZE_USE_PTR(PTR) MUSTDEF_KILL_PTR (PTR)
-#define FINALIZE_CORRECT_USE set_virtual_use_link
-#define FINALIZE_BASE_ZERO 0
-#define FINALIZE_BASE(VAR) get_name_decl (VAR)
-#define FINALIZE_BASE_TYPE unsigned
-#define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
- (PTR)->def_var = (VAL); \
- (PTR)->kill_var = (VAL); \
- (PTR)->use_ptr.use = &((PTR)->kill_var);\
- link_imm_use_stmt (&((PTR)->use_ptr), \
- (VAL), (STMT))
-#include "tree-ssa-opfinalize.h"
+/* Takes elements from build_v_must_defs and turns them into mustdef
+   operands of STMT.  */
+
+static inline void
+finalize_ssa_v_must_def_ops (tree stmt)
+{
+ unsigned new_i;
+ struct mustdef_optype_d new_list;
+ mustdef_optype_p old_ops, ptr, last;
+ tree act;
+ unsigned old_base, new_base;
+
+ new_list.next = NULL;
+ last = &new_list;
+
+ old_ops = MUSTDEF_OPS (stmt);
+
+ new_i = 0;
+ while (old_ops && new_i < VEC_length (tree, build_v_must_defs))
+ {
+ act = VEC_index (tree, build_v_must_defs, new_i);
+ new_base = get_name_decl (act);
+ old_base = get_name_decl (MUSTDEF_KILL (old_ops));
+
+ if (old_base == new_base)
+ {
+ /* If variables are the same, reuse this node. */
+ MOVE_HEAD_AFTER (old_ops, last);
+ set_virtual_use_link (MUSTDEF_KILL_PTR (last), stmt);
+ new_i++;
+ }
+ else if (old_base < new_base)
+ {
+ /* If old is less than new, old goes to the free list. */
+ delink_imm_use (MUSTDEF_KILL_PTR (old_ops));
+ MOVE_HEAD_TO_FREELIST (old_ops, mustdef);
+ }
+ else
+ {
+ /* This is a new operand. */
+ add_mustdef_op (stmt, act, &last);
+ new_i++;
+ }
+ }
+
+  /* If there is anything remaining in the build_v_must_defs list,
+     simply emit it.  */
+ for ( ; new_i < VEC_length (tree, build_v_must_defs); new_i++)
+ add_mustdef_op (stmt, VEC_index (tree, build_v_must_defs, new_i), &last);
+
+ last->next = NULL;
+
+ /* If there is anything in the old list, free it. */
+ if (old_ops)
+ {
+ for (ptr = old_ops; ptr; ptr = ptr->next)
+ delink_imm_use (MUSTDEF_KILL_PTR (ptr));
+ old_ops->next = gimple_ssa_operands (cfun)->free_mustdefs;
+ gimple_ssa_operands (cfun)->free_mustdefs = old_ops;
+ }
+
+ /* Now set the stmt's operands. */
+ MUSTDEF_OPS (stmt) = new_list.next;
+
+#ifdef ENABLE_CHECKING
+ {
+ unsigned x = 0;
+ for (ptr = MUSTDEF_OPS (stmt); ptr; ptr = ptr->next)
+ x++;
+
+ gcc_assert (x == VEC_length (tree, build_v_must_defs));
+ }
+#endif
+}
static void
finalize_ssa_v_must_defs (tree stmt)
/* REF is a tree that contains the entire pointer dereference
expression, if available, or NULL otherwise. ALIAS is the variable
we are asking if REF can access. OFFSET and SIZE come from the
- memory access expression that generated this virtual operand.
- FOR_CLOBBER is true is this is adding a virtual operand for a call
- clobber. */
+ memory access expression that generated this virtual operand. */
static bool
access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
tree base = ref ? get_base_address (ref) : NULL;
+ /* If ALIAS is .GLOBAL_VAR then the memory reference REF must be
+ using a call-clobbered memory tag. By definition, call-clobbered
+ memory tags can always touch .GLOBAL_VAR. */
+ if (alias == gimple_global_var (cfun))
+ return true;
+
/* If ALIAS is an SFT, it can't be touched if the offset
and size of the access is not overlapping with the SFT offset and
size. This is only true if we are accessing through a pointer
{ return (struct foos *)&foo; }
(taken from 20000623-1.c)
+
+ The docs also say/imply that access through union pointers
+ is legal (but *not* if you take the address of the union member,
+ i.e. the inverse), such that you can do
+
+ typedef union {
+ int d;
+ } U;
+
+ int rv;
+ void breakme()
+ {
+ U *rv0;
+ U *pretmp = (U*)&rv;
+ rv0 = pretmp;
+ rv0->d = 42;
+ }
+ To implement this, we just punt on accesses through union
+ pointers entirely.
*/
else if (ref
&& flag_strict_aliasing
&& TREE_CODE (ref) != INDIRECT_REF
&& !MTAG_P (alias)
+ && (TREE_CODE (base) != INDIRECT_REF
+ || TREE_CODE (TREE_TYPE (base)) != UNION_TYPE)
&& !AGGREGATE_TYPE_P (TREE_TYPE (alias))
&& TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE
- && !POINTER_TYPE_P (TREE_TYPE (alias)))
+ && !var_ann (alias)->is_heapvar
+	   /* When the struct has the may_alias attribute attached, its
+	      alias set is zero and we cannot disambiguate, so we must
+	      not conclude that the access misses ALIAS.  */
+ && get_alias_set (base))
{
#ifdef ACCESS_DEBUGGING
fprintf (stderr, "Access to ");
check that this only happens on non-specific stores.
Note that if this is a specific store, i.e. associated with a
- modify_expr, then we can't suppress the V_MAY_DEF, lest we run
+ gimple_modify_stmt, then we can't suppress the V_MAY_DEF, lest we run
into validation problems.
This can happen when programs cast away const, leaving us with a
if (v_ann->is_aliased
|| none_added
|| (TREE_CODE (var) == SYMBOL_MEMORY_TAG
- && for_clobber
- && SMT_USED_ALONE (var)))
+ && for_clobber))
{
- /* Every bare SMT def we add should have SMT_USED_ALONE
- set on it, or else we will get the wrong answer on
- clobbers. */
- if (none_added
- && !updating_used_alone && aliases_computed_p
- && TREE_CODE (var) == SYMBOL_MEMORY_TAG)
- gcc_assert (SMT_USED_ALONE (var));
-
append_v_may_def (var);
}
}
/* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
- if (global_var)
+ if (gimple_global_var (cfun))
{
- add_stmt_operand (&global_var, s_ann, opf_is_def);
+ tree var = gimple_global_var (cfun);
+ add_stmt_operand (&var, s_ann, opf_is_def);
return;
}
not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
/* Add a V_MAY_DEF operand for every call clobbered variable. */
- EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
+ EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
{
tree var = referenced_var_lookup (u);
unsigned int escape_mask = var_ann (var)->escape_mask;
/* if the function is not pure, it may reference memory. Add
a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
for the heuristic used to decide whether to create .GLOBAL_VAR. */
- if (global_var)
+ if (gimple_global_var (cfun))
{
- add_stmt_operand (&global_var, s_ann, opf_none);
+ tree var = gimple_global_var (cfun);
+ add_stmt_operand (&var, s_ann, opf_none);
return;
}
not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
/* Add a VUSE for each call-clobbered variable. */
- EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
+ EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
{
tree var = referenced_var (u);
tree real_var = var;
computed. By not bothering with virtual operands for CALL_EXPRs
we avoid adding superfluous virtual operands, which can be a
significant compile time sink (See PR 15855). */
- if (aliases_computed_p
- && !bitmap_empty_p (call_clobbered_vars)
+ if (gimple_aliases_computed_p (cfun)
+ && !bitmap_empty_p (gimple_call_clobbered_vars (cfun))
&& !(call_flags & ECF_NOVOPS))
{
/* A 'pure' or a 'const' function never call-clobbers anything.
/* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
decided to group them). */
- if (global_var)
- add_stmt_operand (&global_var, s_ann, opf_is_def);
+ if (gimple_global_var (cfun))
+ {
+ tree var = gimple_global_var (cfun);
+ add_stmt_operand (&var, s_ann, opf_is_def);
+ }
else
- EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
+ EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
{
tree var = referenced_var (i);
add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
}
/* Now clobber all addressables. */
- EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
+ EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, i, bi)
{
tree var = referenced_var (i);
/* Scan operands for the assignment expression EXPR in statement STMT. */
static void
-get_modify_expr_operands (tree stmt, tree expr)
+get_modify_stmt_operands (tree stmt, tree expr)
{
/* First get operands from the RHS. */
- get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
+ get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 1), opf_none);
/* For the LHS, use a regular definition (OPF_IS_DEF) for GIMPLE
registers. If the LHS is a store to memory, we will either need
The determination of whether to use a preserving or a killing
definition is done while scanning the LHS of the assignment. By
default, assume that we will emit a V_MUST_DEF. */
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_is_def|opf_kill_def);
+ get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 0),
+ opf_is_def|opf_kill_def);
}
get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
return;
- case MODIFY_EXPR:
- get_modify_expr_operands (stmt, expr);
+ case GIMPLE_MODIFY_STMT:
+ get_modify_stmt_operands (stmt, expr);
return;
case CONSTRUCTOR:
case OMP_PARALLEL:
case OMP_SECTIONS:
case OMP_FOR:
- case OMP_RETURN_EXPR:
case OMP_SINGLE:
case OMP_MASTER:
case OMP_ORDERED:
case OMP_CRITICAL:
+ case OMP_RETURN:
+ case OMP_CONTINUE:
/* Expressions that make no memory references. */
return;
code = TREE_CODE (stmt);
switch (code)
{
- case MODIFY_EXPR:
- get_modify_expr_operands (stmt, stmt);
+ case GIMPLE_MODIFY_STMT:
+ get_modify_stmt_operands (stmt, stmt);
break;
case COND_EXPR:
FOR_EACH_IMM_USE_FAST (use_p, iter, var)
{
- if (!is_gimple_reg (USE_FROM_PTR (use_p)))
- print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
+ if (use_p->stmt == NULL && use_p->use == NULL)
+ fprintf (file, "***end of stmt iterator marker***\n");
else
- print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
+ if (!is_gimple_reg (USE_FROM_PTR (use_p)))
+ print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
+ else
+ print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
}
fprintf(file, "\n");
}
{
dump_immediate_uses_for (stderr, var);
}
-
-#include "gt-tree-ssa-operands.h"