/* SSA operands management for trees.
- Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
+ Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
This file is part of GCC.
The operand tree is then parsed by the various get_* routines which look
through the stmt tree for the occurrence of operands which may be of
interest, and calls are made to the append_* routines whenever one is
- found. There are 5 of these routines, each representing one of the
- 5 types of operands. Defs, Uses, Virtual Uses, Virtual May Defs, and
- Virtual Must Defs.
+ found. There are 4 of these routines, each representing one of the
+ 4 types of operands. Defs, Uses, Virtual Uses, and Virtual May Defs.
The append_* routines check for duplication, and simply keep a list of
unique objects for each operand type in the build_* extendable vectors.
Once the stmt tree is completely parsed, the finalize_ssa_operands()
routine is called, which proceeds to perform the finalization routine
- on each of the 5 operand vectors which have been built up.
+ on each of the 4 operand vectors which have been built up.
If the stmt had a previous operand cache, the finalization routines
attempt to match up the new operands with the old ones. If it's a perfect
match, the old vector is simply reused. If it isn't a perfect match, then
the new vector is created and the old one is freed. SSA_NAME versions
from the old operands are preserved in the new ones wherever possible:
i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
vector for VUSE, then the new vector will also be modified such that
- it contains 'a_5' rather than 'a'.
+ it contains 'a_5' rather than 'a'. */
-*/
+
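+/* For illustration, parsing a statement such as '*p = x_1', where 'p'
+ may point to 'a' and 'b', appends a USE of 'x_1' and VDEFs of 'a'
+ and 'b'; once finalized and renamed, the statement dumps as:
+
+ # a_3 = VDEF <a_2>
+ # b_5 = VDEF <b_4>
+ *p = x_1; */
+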
+/* Structure storing statistics on how many call clobbers we have, and
+ how many were avoided. */
+
+static struct
+{
+ /* Number of call-clobbered ops we attempt to add to calls in
+ add_call_clobbered_mem_symbols. */
+ unsigned int clobbered_vars;
+
+ /* Number of write-clobbers (VDEFs) avoided by using
+ not_written information. */
+ unsigned int static_write_clobbers_avoided;
+
+ /* Number of reads (VUSEs) avoided by using not_read information. */
+ unsigned int static_read_clobbers_avoided;
+
+ /* Number of write-clobbers avoided because the variable can't escape to
+ this call. */
+ unsigned int unescapable_clobbers_avoided;
+
+ /* Number of read-only uses we attempt to add to calls in
+ add_call_read_mem_symbols. */
+ unsigned int readonly_clobbers;
+
+ /* Number of read-only uses we avoid using not_read information. */
+ unsigned int static_readonly_clobbers_avoided;
+} clobber_stats;
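+
+/* These counters are cleared in init_ssa_operands and reported by
+ fini_ssa_operands when TDF_STATS is set in the dump flags. */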
/* Flags to describe operand properties in helpers. */
/* By default, operands are loaded. */
-#define opf_none 0
+#define opf_use 0
/* Operand is the target of an assignment expression or a
- call-clobbered variable */
-#define opf_is_def (1 << 0)
-
-/* Operand is the target of an assignment expression. */
-#define opf_kill_def (1 << 1)
+ call-clobbered variable. */
+#define opf_def (1 << 0)
/* No virtual operands should be created in the expression. This is used
when traversing ADDR_EXPR nodes which have different semantics than
other expressions. Inside an ADDR_EXPR node, the only operands that we
need to consider are indices into arrays. For instance, &a.b[i] should
generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
VUSE for 'b'. */
-#define opf_no_vops (1 << 2)
-
-/* Operand is a "non-specific" kill for call-clobbers and such. This is used
- to distinguish "reset the world" events from explicit MODIFY_EXPRs. */
-#define opf_non_specific (1 << 3)
+#define opf_no_vops (1 << 1)
-/* This structure maintain a sorted list of operands which is created by
- parse_ssa_operand. */
-struct opbuild_list_d GTY (())
-{
- varray_type vars; /* The VAR_DECLS tree. */
- varray_type uid; /* The sort value for virtual symbols. */
- varray_type next; /* The next index in the sorted list. */
- int first; /* First element in list. */
- unsigned num; /* Number of elements. */
-};
-
-#define OPBUILD_LAST -1
-
+/* Operand is an implicit reference. This is used to distinguish
+ explicit assignments in the form of GIMPLE_MODIFY_STMT from
+ clobbering sites like function calls or ASM_EXPRs. */
+#define opf_implicit (1 << 2)
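+
+/* For example, a call site that clobbers a read-only variable adds its
+ VDEF with opf_implicit set; add_virtual_operand below then clears
+ opf_def and records a VUSE instead (see the unmodifiable_var_p check
+ there). */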
/* Array for building all the def operands. */
-static GTY (()) struct opbuild_list_d build_defs;
+static VEC(tree,heap) *build_defs;
/* Array for building all the use operands. */
-static GTY (()) struct opbuild_list_d build_uses;
+static VEC(tree,heap) *build_uses;
-/* Array for building all the v_may_def operands. */
-static GTY (()) struct opbuild_list_d build_v_may_defs;
+/* Set for building all the VDEF operands. */
+static VEC(tree,heap) *build_vdefs;
-/* Array for building all the vuse operands. */
-static GTY (()) struct opbuild_list_d build_vuses;
+/* Set for building all the VUSE operands. */
+static VEC(tree,heap) *build_vuses;
-/* Array for building all the v_must_def operands. */
-static GTY (()) struct opbuild_list_d build_v_must_defs;
+/* Bitmap obstack for our data structures that need to survive across
+ compilations of multiple functions. */
+static bitmap_obstack operands_bitmap_obstack;
-/* True if the operands for call clobbered vars are cached and valid. */
-bool ssa_call_clobbered_cache_valid;
-bool ssa_ro_call_cache_valid;
+/* Set for building all the loaded symbols. */
+static bitmap build_loads;
-/* These arrays are the cached operand vectors for call clobbered calls. */
-static VEC(tree,heap) *clobbered_v_may_defs;
-static VEC(tree,heap) *clobbered_vuses;
-static VEC(tree,heap) *ro_call_vuses;
-static bool clobbered_aliased_loads;
-static bool clobbered_aliased_stores;
-static bool ro_call_aliased_loads;
-static bool ops_active = false;
-
-static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
-static unsigned operand_memory_index;
+/* Set for building all the stored symbols. */
+static bitmap build_stores;
static void get_expr_operands (tree, tree *, int);
-static void get_asm_expr_operands (tree);
-static void get_indirect_ref_operands (tree, tree, int);
-static void get_tmr_operands (tree, tree, int);
-static void get_call_expr_operands (tree, tree);
-static inline void append_def (tree *);
-static inline void append_use (tree *);
-static void append_v_may_def (tree);
-static void append_v_must_def (tree);
-static void add_call_clobber_ops (tree, tree);
-static void add_call_read_ops (tree);
-static void add_stmt_operand (tree *, stmt_ann_t, int);
-static void build_ssa_operands (tree stmt);
-
-static def_optype_p free_defs = NULL;
-static use_optype_p free_uses = NULL;
-static vuse_optype_p free_vuses = NULL;
-static maydef_optype_p free_maydefs = NULL;
-static mustdef_optype_p free_mustdefs = NULL;
-
-/* Initialize a virtual operand build LIST called NAME with NUM elements. */
-static inline void
-opbuild_initialize_virtual (struct opbuild_list_d *list, int num,
- const char *name)
-{
- list->first = OPBUILD_LAST;
- list->num = 0;
- VARRAY_TREE_INIT (list->vars, num, name);
- VARRAY_UINT_INIT (list->uid, num, "List UID");
- VARRAY_INT_INIT (list->next, num, "List NEXT");
-}
+/* Number of functions with initialized ssa_operands. */
+static int n_initialized = 0;
+/* Statement change buffer. Data structure used to record state
+ information for statements. This is used to determine what needs
+ to be done in order to update the SSA web after a statement is
+ modified by a pass. If STMT is a statement that has just been
+ created, or needs to be folded via fold_stmt, or anything else that
+ changes its physical structure, then the pass should:
-/* Initialize a real operand build LIST called NAME with NUM elements. */
+ 1- Call push_stmt_changes (&stmt) to record the current state of
+ STMT before any modifications are made.
-static inline void
-opbuild_initialize_real (struct opbuild_list_d *list, int num, const char *name)
-{
- list->first = OPBUILD_LAST;
- list->num = 0;
- VARRAY_TREE_PTR_INIT (list->vars, num, name);
- VARRAY_INT_INIT (list->next, num, "List NEXT");
- /* The UID field is not needed since we sort based on the pointer value. */
- list->uid = NULL;
-}
+ 2- Make all appropriate modifications to the statement.
+ 3- Call pop_stmt_changes (&stmt) to find new symbols that
+ need to be put in SSA form, SSA name mappings for names that
+ have disappeared, recompute invariantness for address
+ expressions, cleanup EH information, etc.
-/* Free memory used in virtual operand build object LIST. */
+ If it is possible to determine that the statement was not modified,
+ instead of calling pop_stmt_changes it is quicker to call
+ discard_stmt_changes to avoid the expensive and unnecessary operand
+ re-scan and change comparison. */
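+
+/* A minimal sketch of that protocol, assuming a pass that folds STMT
+ with fold_stmt (which returns true if it made a change):
+
+ push_stmt_changes (&stmt);
+ if (fold_stmt (&stmt))
+ pop_stmt_changes (&stmt);
+ else
+ discard_stmt_changes (&stmt); */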
-static inline void
-opbuild_free (struct opbuild_list_d *list)
+struct scb_d
{
- list->vars = NULL;
- list->uid = NULL;
- list->next = NULL;
-}
+ /* Pointer to the statement being modified. */
+ tree *stmt_p;
+
+ /* If the statement references memory these are the sets of symbols
+ loaded and stored by the statement. */
+ bitmap loads;
+ bitmap stores;
+};
+
+typedef struct scb_d *scb_t;
+DEF_VEC_P(scb_t);
+DEF_VEC_ALLOC_P(scb_t,heap);
+/* Stack of statement change buffers (SCB). Every call to
+ push_stmt_changes pushes a new buffer onto the stack. Calls to
+ pop_stmt_changes pop a buffer off of the stack and compute the set
+ of changes for the popped statement. */
+static VEC(scb_t,heap) *scb_stack;
-/* Number of elements in an opbuild list. */
+/* Return the DECL_UID of the base variable of T. */
static inline unsigned
-opbuild_num_elems (struct opbuild_list_d *list)
+get_name_decl (tree t)
{
- return list->num;
+ if (TREE_CODE (t) != SSA_NAME)
+ return DECL_UID (t);
+ else
+ return DECL_UID (SSA_NAME_VAR (t));
}
-/* Add VAR to the real operand list LIST, keeping it sorted and avoiding
- duplicates. The actual sort value is the tree pointer value. */
+/* Comparison function for qsort used in operand_build_sort_virtual. */
-static inline void
-opbuild_append_real (struct opbuild_list_d *list, tree *var)
+static int
+operand_build_cmp (const void *p, const void *q)
{
- int index;
+ tree e1 = *((const tree *)p);
+ tree e2 = *((const tree *)q);
+ unsigned int u1, u2;
+ u1 = get_name_decl (e1);
+ u2 = get_name_decl (e2);
+
+ /* We want to sort in ascending order. They can never be equal. */
#ifdef ENABLE_CHECKING
- /* Ensure the real operand doesn't exist already. */
- for (index = list->first;
- index != OPBUILD_LAST;
- index = VARRAY_INT (list->next, index))
- gcc_assert (VARRAY_TREE_PTR (list->vars, index) != var);
+ gcc_assert (u1 != u2);
#endif
-
- /* First item in the list. */
- index = VARRAY_ACTIVE_SIZE (list->vars);
- if (index == 0)
- list->first = index;
- else
- VARRAY_INT (list->next, index - 1) = index;
- VARRAY_PUSH_INT (list->next, OPBUILD_LAST);
- VARRAY_PUSH_TREE_PTR (list->vars, var);
- list->num++;
+ return (u1 > u2 ? 1 : -1);
}
-/* Add VAR to the virtual operand list LIST, keeping it sorted and avoiding
- duplicates. The actual sort value is the DECL UID of the base variable. */
+/* Sort the virtual operands in LIST from lowest DECL_UID to highest. */
static inline void
-opbuild_append_virtual (struct opbuild_list_d *list, tree var)
+operand_build_sort_virtual (VEC(tree,heap) *list)
{
- int index, curr, last;
- unsigned int var_uid;
-
- if (TREE_CODE (var) != SSA_NAME)
- var_uid = DECL_UID (var);
- else
- var_uid = DECL_UID (SSA_NAME_VAR (var));
+ int num = VEC_length (tree, list);
- index = VARRAY_ACTIVE_SIZE (list->vars);
+ if (num < 2)
+ return;
- if (index == 0)
+ if (num == 2)
{
- VARRAY_PUSH_TREE (list->vars, var);
- VARRAY_PUSH_UINT (list->uid, var_uid);
- VARRAY_PUSH_INT (list->next, OPBUILD_LAST);
- list->first = 0;
- list->num = 1;
+ if (get_name_decl (VEC_index (tree, list, 0))
+ > get_name_decl (VEC_index (tree, list, 1)))
+ {
+ /* Swap elements if in the wrong order. */
+ tree tmp = VEC_index (tree, list, 0);
+ VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
+ VEC_replace (tree, list, 1, tmp);
+ }
return;
}
- last = OPBUILD_LAST;
- /* Find the correct spot in the sorted list. */
- for (curr = list->first;
- curr != OPBUILD_LAST;
- last = curr, curr = VARRAY_INT (list->next, curr))
- {
- if (VARRAY_UINT (list->uid, curr) > var_uid)
- break;
- }
-
- if (last == OPBUILD_LAST)
- {
- /* First item in the list. */
- VARRAY_PUSH_INT (list->next, list->first);
- list->first = index;
- }
- else
- {
- /* Don't enter duplicates at all. */
- if (VARRAY_UINT (list->uid, last) == var_uid)
- return;
-
- VARRAY_PUSH_INT (list->next, VARRAY_INT (list->next, last));
- VARRAY_INT (list->next, last) = index;
- }
- VARRAY_PUSH_TREE (list->vars, var);
- VARRAY_PUSH_UINT (list->uid, var_uid);
- list->num++;
+ /* There are 3 or more elements, call qsort. */
+ qsort (VEC_address (tree, list),
+ VEC_length (tree, list),
+ sizeof (tree),
+ operand_build_cmp);
}
-/* Return the first element index in LIST. OPBUILD_LAST means there are no
- more elements. */
+/* Return true if the SSA operands cache is active. */
-static inline int
-opbuild_first (struct opbuild_list_d *list)
+bool
+ssa_operands_active (void)
{
- if (list->num > 0)
- return list->first;
- else
- return OPBUILD_LAST;
+ return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
}
-/* Return the next element after PREV in LIST. */
-
-static inline int
-opbuild_next (struct opbuild_list_d *list, int prev)
-{
- return VARRAY_INT (list->next, prev);
-}
+/* VOPs are of variable size, so the free list maps "free buckets" to the
+ following table:
+ bucket # operands
+ ------ ----------
+ 0 1
+ 1 2
+ ...
+ 15 16
+ 16 17-24
+ 17 25-32
+ 18 33-40
+ ...
+ 29 121-128
+ Any VOPs larger than this are simply added to the largest bucket when they
+ are freed. */
-/* Return the real element at index ELEM in LIST. */
+/* Return the number of operands used in bucket BUCKET. */
-static inline tree *
-opbuild_elem_real (struct opbuild_list_d *list, int elem)
+static inline int
+vop_free_bucket_size (int bucket)
{
- return VARRAY_TREE_PTR (list->vars, elem);
+#ifdef ENABLE_CHECKING
+ gcc_assert (bucket >= 0 && bucket < NUM_VOP_FREE_BUCKETS);
+#endif
+ if (bucket < 16)
+ return bucket + 1;
+ return (bucket - 13) * 8;
}
-/* Return the virtual element at index ELEM in LIST. */
-
-static inline tree
-opbuild_elem_virtual (struct opbuild_list_d *list, int elem)
-{
- return VARRAY_TREE (list->vars, elem);
-}
+/* For a vop of NUM operands, return the bucket NUM belongs to. If NUM is
+ beyond the end of the bucket table, return -1. */
-
-/* Return the virtual element uid at index ELEM in LIST. */
-static inline unsigned int
-opbuild_elem_uid (struct opbuild_list_d *list, int elem)
+static inline int
+vop_free_bucket_index (int num)
{
- return VARRAY_UINT (list->uid, elem);
+ gcc_assert (num > 0 && NUM_VOP_FREE_BUCKETS > 16);
+
+ /* Sizes 1 through 16 use buckets 0-15. */
+ if (num <= 16)
+ return num - 1;
+ /* Buckets 16 - NUM_VOP_FREE_BUCKETS represent 8 unit chunks. */
+ num = 14 + (num - 1) / 8;
+ if (num >= NUM_VOP_FREE_BUCKETS)
+ return -1;
+ else
+ return num;
}
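+
+/* For instance, a request for 27 operands maps to bucket
+ 14 + (27 - 1) / 8 = 17, and vop_free_bucket_size (17) == (17 - 13) * 8
+ == 32, so any entry on that bucket's free list can hold the request. */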
-/* Reset an operand build list. */
+/* Initialize the VOP free buckets. */
static inline void
-opbuild_clear (struct opbuild_list_d *list)
+init_vop_buckets (void)
{
- list->first = OPBUILD_LAST;
- VARRAY_POP_ALL (list->vars);
- VARRAY_POP_ALL (list->next);
- if (list->uid)
- VARRAY_POP_ALL (list->uid);
- list->num = 0;
+ int x;
+
+ for (x = 0; x < NUM_VOP_FREE_BUCKETS; x++)
+ gimple_ssa_operands (cfun)->vop_free_buckets[x] = NULL;
}
-/* Remove ELEM from LIST where PREV is the previous element. Return the next
- element. */
+/* Add PTR to the appropriate VOP bucket. */
-static inline int
-opbuild_remove_elem (struct opbuild_list_d *list, int elem, int prev)
+static inline void
+add_vop_to_freelist (voptype_p ptr)
{
- int ret;
- if (prev != OPBUILD_LAST)
- {
- gcc_assert (VARRAY_INT (list->next, prev) == elem);
- ret = VARRAY_INT (list->next, prev) = VARRAY_INT (list->next, elem);
- }
- else
- {
- gcc_assert (list->first == elem);
- ret = list->first = VARRAY_INT (list->next, elem);
- }
- list->num--;
- return ret;
-}
-
+ int bucket = vop_free_bucket_index (VUSE_VECT_NUM_ELEM (ptr->usev));
-/* Return true if the ssa operands cache is active. */
+ /* Too large, use the largest bucket so it's not a complete throwaway. */
+ if (bucket == -1)
+ bucket = NUM_VOP_FREE_BUCKETS - 1;
-bool
-ssa_operands_active (void)
-{
- return ops_active;
+ ptr->next = gimple_ssa_operands (cfun)->vop_free_buckets[bucket];
+ gimple_ssa_operands (cfun)->vop_free_buckets[bucket] = ptr;
}
+
+/* These are the sizes, in units of struct voptype_d, of the operand memory
+ buffer that is allocated each time more operand memory is required. The
+ final value is the amount that is allocated every time after that. */
+
+#define OP_SIZE_INIT 0
+#define OP_SIZE_1 30
+#define OP_SIZE_2 110
+#define OP_SIZE_3 511
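+
+/* The buffer thus grows from 30 to 110 to 511 times
+ sizeof (struct voptype_d) bytes as a function requires more operand
+ memory; see the laddering in ssa_operand_alloc below. */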
/* Initialize the operand cache routines. */
void
init_ssa_operands (void)
{
- opbuild_initialize_real (&build_defs, 5, "build defs");
- opbuild_initialize_real (&build_uses, 10, "build uses");
- opbuild_initialize_virtual (&build_vuses, 25, "build_vuses");
- opbuild_initialize_virtual (&build_v_may_defs, 25, "build_v_may_defs");
- opbuild_initialize_virtual (&build_v_must_defs, 25, "build_v_must_defs");
- gcc_assert (operand_memory == NULL);
- operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
- ops_active = true;
+ if (!n_initialized++)
+ {
+ build_defs = VEC_alloc (tree, heap, 5);
+ build_uses = VEC_alloc (tree, heap, 10);
+ build_vuses = VEC_alloc (tree, heap, 25);
+ build_vdefs = VEC_alloc (tree, heap, 25);
+ bitmap_obstack_initialize (&operands_bitmap_obstack);
+ build_loads = BITMAP_ALLOC (&operands_bitmap_obstack);
+ build_stores = BITMAP_ALLOC (&operands_bitmap_obstack);
+ scb_stack = VEC_alloc (scb_t, heap, 20);
+ }
+
+ gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
+ gcc_assert (gimple_ssa_operands (cfun)->mpt_table == NULL);
+ gimple_ssa_operands (cfun)->operand_memory_index
+ = gimple_ssa_operands (cfun)->ssa_operand_mem_size;
+ gimple_ssa_operands (cfun)->ops_active = true;
+ memset (&clobber_stats, 0, sizeof (clobber_stats));
+ init_vop_buckets ();
+ gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_INIT;
}
fini_ssa_operands (void)
{
struct ssa_operand_memory_d *ptr;
- opbuild_free (&build_defs);
- opbuild_free (&build_uses);
- opbuild_free (&build_v_must_defs);
- opbuild_free (&build_v_may_defs);
- opbuild_free (&build_vuses);
- free_defs = NULL;
- free_uses = NULL;
- free_vuses = NULL;
- free_maydefs = NULL;
- free_mustdefs = NULL;
- while ((ptr = operand_memory) != NULL)
+ unsigned ix;
+ tree mpt;
+
+ if (!--n_initialized)
{
- operand_memory = operand_memory->next;
+ VEC_free (tree, heap, build_defs);
+ VEC_free (tree, heap, build_uses);
+ VEC_free (tree, heap, build_vdefs);
+ VEC_free (tree, heap, build_vuses);
+ BITMAP_FREE (build_loads);
+ BITMAP_FREE (build_stores);
+
+ /* The change buffer stack had better be empty. */
+ gcc_assert (VEC_length (scb_t, scb_stack) == 0);
+ VEC_free (scb_t, heap, scb_stack);
+ scb_stack = NULL;
+ }
+
+ gimple_ssa_operands (cfun)->free_defs = NULL;
+ gimple_ssa_operands (cfun)->free_uses = NULL;
+
+ while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
+ {
+ gimple_ssa_operands (cfun)->operand_memory
+ = gimple_ssa_operands (cfun)->operand_memory->next;
ggc_free (ptr);
}
- VEC_free (tree, heap, clobbered_v_may_defs);
- VEC_free (tree, heap, clobbered_vuses);
- VEC_free (tree, heap, ro_call_vuses);
- ops_active = false;
+ for (ix = 0;
+ VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, ix, mpt);
+ ix++)
+ {
+ if (mpt)
+ BITMAP_FREE (MPT_SYMBOLS (mpt));
+ }
+
+ VEC_free (tree, heap, gimple_ssa_operands (cfun)->mpt_table);
+
+ gimple_ssa_operands (cfun)->ops_active = false;
+
+ if (!n_initialized)
+ bitmap_obstack_release (&operands_bitmap_obstack);
+ if (dump_file && (dump_flags & TDF_STATS))
+ {
+ fprintf (dump_file, "Original clobbered vars: %d\n",
+ clobber_stats.clobbered_vars);
+ fprintf (dump_file, "Static write clobbers avoided: %d\n",
+ clobber_stats.static_write_clobbers_avoided);
+ fprintf (dump_file, "Static read clobbers avoided: %d\n",
+ clobber_stats.static_read_clobbers_avoided);
+ fprintf (dump_file, "Unescapable clobbers avoided: %d\n",
+ clobber_stats.unescapable_clobbers_avoided);
+ fprintf (dump_file, "Original read-only clobbers: %d\n",
+ clobber_stats.readonly_clobbers);
+ fprintf (dump_file, "Static read-only clobbers avoided: %d\n",
+ clobber_stats.static_readonly_clobbers_avoided);
+ }
}
ssa_operand_alloc (unsigned size)
{
char *ptr;
- if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
+
+ if (gimple_ssa_operands (cfun)->operand_memory_index + size
+ >= gimple_ssa_operands (cfun)->ssa_operand_mem_size)
{
struct ssa_operand_memory_d *ptr;
- ptr = ggc_alloc (sizeof (struct ssa_operand_memory_d));
- ptr->next = operand_memory;
- operand_memory = ptr;
- operand_memory_index = 0;
+
+ if (gimple_ssa_operands (cfun)->ssa_operand_mem_size == OP_SIZE_INIT)
+ gimple_ssa_operands (cfun)->ssa_operand_mem_size
+ = OP_SIZE_1 * sizeof (struct voptype_d);
+ else
+ if (gimple_ssa_operands (cfun)->ssa_operand_mem_size
+ == OP_SIZE_1 * sizeof (struct voptype_d))
+ gimple_ssa_operands (cfun)->ssa_operand_mem_size
+ = OP_SIZE_2 * sizeof (struct voptype_d);
+ else
+ gimple_ssa_operands (cfun)->ssa_operand_mem_size
+ = OP_SIZE_3 * sizeof (struct voptype_d);
+
+ /* Go right to the maximum size if the request is too large. */
+ if (size > gimple_ssa_operands (cfun)->ssa_operand_mem_size)
+ gimple_ssa_operands (cfun)->ssa_operand_mem_size
+ = OP_SIZE_3 * sizeof (struct voptype_d);
+
+ /* Fail if there is not enough space. If this many operands are
+ really required, first make sure a different problem isn't creating
+ so many of them. If the request turns out to be legitimate, we can
+ specially allocate a buffer just for it. */
+ gcc_assert (size <= gimple_ssa_operands (cfun)->ssa_operand_mem_size);
+
+ ptr = (struct ssa_operand_memory_d *)
+ ggc_alloc (sizeof (struct ssa_operand_memory_d)
+ + gimple_ssa_operands (cfun)->ssa_operand_mem_size - 1);
+ ptr->next = gimple_ssa_operands (cfun)->operand_memory;
+ gimple_ssa_operands (cfun)->operand_memory = ptr;
+ gimple_ssa_operands (cfun)->operand_memory_index = 0;
}
- ptr = &(operand_memory->mem[operand_memory_index]);
- operand_memory_index += size;
+ ptr = &(gimple_ssa_operands (cfun)->operand_memory
+ ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
+ gimple_ssa_operands (cfun)->operand_memory_index += size;
return ptr;
}
-/* Make sure PTR is in the correct immediate use list. Since uses are simply
- pointers into the stmt TREE, there is no way of telling if anyone has
- changed what this pointer points to via TREE_OPERANDS (exp, 0) = <...>.
- The contents are different, but the pointer is still the same. This
- routine will check to make sure PTR is in the correct list, and if it isn't
- put it in the correct list. We cannot simply check the previous node
- because all nodes in the same stmt might have be changed. */
+/* Allocate a DEF operand. */
-static inline void
-correct_use_link (use_operand_p ptr, tree stmt)
+static inline struct def_optype_d *
+alloc_def (void)
{
- use_operand_p prev;
- tree root;
-
- /* Fold_stmt () may have changed the stmt pointers. */
- if (ptr->stmt != stmt)
- ptr->stmt = stmt;
-
- prev = ptr->prev;
- if (prev)
+ struct def_optype_d *ret;
+ if (gimple_ssa_operands (cfun)->free_defs)
{
- bool stmt_mod = true;
- /* Find the first element which isn't a SAFE iterator, is in a different
- stmt, and is not a modified stmt. That node is in the correct list,
- see if we are too. */
-
- while (stmt_mod)
- {
- while (prev->stmt == stmt || prev->stmt == NULL)
- prev = prev->prev;
- if (prev->use == NULL)
- stmt_mod = false;
- else
- if ((stmt_mod = stmt_modified_p (prev->stmt)))
- prev = prev->prev;
- }
-
- /* Get the ssa_name of the list the node is in. */
- if (prev->use == NULL)
- root = prev->stmt;
- else
- root = *(prev->use);
- /* If it's the right list, simply return. */
- if (root == *(ptr->use))
- return;
+ ret = gimple_ssa_operands (cfun)->free_defs;
+ gimple_ssa_operands (cfun)->free_defs
+ = gimple_ssa_operands (cfun)->free_defs->next;
}
- /* Its in the wrong list if we reach here. */
- delink_imm_use (ptr);
- link_imm_use (ptr, *(ptr->use));
+ else
+ ret = (struct def_optype_d *)
+ ssa_operand_alloc (sizeof (struct def_optype_d));
+ return ret;
}
-#define FINALIZE_OPBUILD build_defs
-#define FINALIZE_OPBUILD_BASE(I) opbuild_elem_real (&build_defs, (I))
-#define FINALIZE_OPBUILD_ELEM(I) opbuild_elem_real (&build_defs, (I))
-#define FINALIZE_FUNC finalize_ssa_def_ops
-#define FINALIZE_ALLOC alloc_def
-#define FINALIZE_FREE free_defs
-#define FINALIZE_TYPE struct def_optype_d
-#define FINALIZE_ELEM(PTR) ((PTR)->def_ptr)
-#define FINALIZE_OPS DEF_OPS
-#define FINALIZE_BASE(VAR) VAR
-#define FINALIZE_BASE_TYPE tree *
-#define FINALIZE_BASE_ZERO NULL
-#define FINALIZE_INITIALIZE(PTR, VAL, STMT) FINALIZE_ELEM (PTR) = (VAL)
-#include "tree-ssa-opfinalize.h"
-
-
-/* This routine will create stmt operands for STMT from the def build list. */
+/* Allocate a USE operand. */
-static void
-finalize_ssa_defs (tree stmt)
-{
- unsigned int num = opbuild_num_elems (&build_defs);
- /* There should only be a single real definition per assignment. */
- gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);
-
- /* If there is an old list, often the new list is identical, or close, so
- find the elements at the beginning that are the same as the vector. */
-
- finalize_ssa_def_ops (stmt);
- opbuild_clear (&build_defs);
-}
-
-#define FINALIZE_OPBUILD build_uses
-#define FINALIZE_OPBUILD_BASE(I) opbuild_elem_real (&build_uses, (I))
-#define FINALIZE_OPBUILD_ELEM(I) opbuild_elem_real (&build_uses, (I))
-#define FINALIZE_FUNC finalize_ssa_use_ops
-#define FINALIZE_ALLOC alloc_use
-#define FINALIZE_FREE free_uses
-#define FINALIZE_TYPE struct use_optype_d
-#define FINALIZE_ELEM(PTR) ((PTR)->use_ptr.use)
-#define FINALIZE_OPS USE_OPS
-#define FINALIZE_USE_PTR(PTR) USE_OP_PTR (PTR)
-#define FINALIZE_BASE(VAR) VAR
-#define FINALIZE_BASE_TYPE tree *
-#define FINALIZE_BASE_ZERO NULL
-#define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
- (PTR)->use_ptr.use = (VAL); \
- link_imm_use_stmt (&((PTR)->use_ptr), \
- *(VAL), (STMT))
-#include "tree-ssa-opfinalize.h"
-
-/* Return a new use operand vector for STMT, comparing to OLD_OPS_P. */
-
-static void
-finalize_ssa_uses (tree stmt)
-{
-#ifdef ENABLE_CHECKING
- {
- unsigned x;
- unsigned num = opbuild_num_elems (&build_uses);
-
- /* If the pointer to the operand is the statement itself, something is
- wrong. It means that we are pointing to a local variable (the
- initial call to get_stmt_operands does not pass a pointer to a
- statement). */
- for (x = 0; x < num; x++)
- gcc_assert (*(opbuild_elem_real (&build_uses, x)) != stmt);
- }
-#endif
- finalize_ssa_use_ops (stmt);
- opbuild_clear (&build_uses);
-}
-
-
-/* Return a new v_may_def operand vector for STMT, comparing to OLD_OPS_P. */
-#define FINALIZE_OPBUILD build_v_may_defs
-#define FINALIZE_OPBUILD_ELEM(I) opbuild_elem_virtual (&build_v_may_defs, (I))
-#define FINALIZE_OPBUILD_BASE(I) opbuild_elem_uid (&build_v_may_defs, (I))
-#define FINALIZE_FUNC finalize_ssa_v_may_def_ops
-#define FINALIZE_ALLOC alloc_maydef
-#define FINALIZE_FREE free_maydefs
-#define FINALIZE_TYPE struct maydef_optype_d
-#define FINALIZE_ELEM(PTR) MAYDEF_RESULT (PTR)
-#define FINALIZE_OPS MAYDEF_OPS
-#define FINALIZE_USE_PTR(PTR) MAYDEF_OP_PTR (PTR)
-#define FINALIZE_BASE_ZERO 0
-#define FINALIZE_BASE(VAR) ((TREE_CODE (VAR) == SSA_NAME) \
- ? DECL_UID (SSA_NAME_VAR (VAR)) : DECL_UID ((VAR)))
-#define FINALIZE_BASE_TYPE unsigned
-#define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
- (PTR)->def_var = (VAL); \
- (PTR)->use_var = (VAL); \
- (PTR)->use_ptr.use = &((PTR)->use_var); \
- link_imm_use_stmt (&((PTR)->use_ptr), \
- (VAL), (STMT))
-#include "tree-ssa-opfinalize.h"
-
-
-static void
-finalize_ssa_v_may_defs (tree stmt)
+static inline struct use_optype_d *
+alloc_use (void)
{
- finalize_ssa_v_may_def_ops (stmt);
+ struct use_optype_d *ret;
+ if (gimple_ssa_operands (cfun)->free_uses)
+ {
+ ret = gimple_ssa_operands (cfun)->free_uses;
+ gimple_ssa_operands (cfun)->free_uses
+ = gimple_ssa_operands (cfun)->free_uses->next;
+ }
+ else
+ ret = (struct use_optype_d *)
+ ssa_operand_alloc (sizeof (struct use_optype_d));
+ return ret;
}
-
-/* Clear the in_list bits and empty the build array for v_may_defs. */
-static inline void
-cleanup_v_may_defs (void)
+/* Allocate a vop with NUM elements. */
+
+static inline struct voptype_d *
+alloc_vop (int num)
{
- unsigned x, num;
- num = opbuild_num_elems (&build_v_may_defs);
+ struct voptype_d *ret = NULL;
+ int alloc_size = 0;
- for (x = 0; x < num; x++)
+ int bucket = vop_free_bucket_index (num);
+ if (bucket != -1)
{
- tree t = opbuild_elem_virtual (&build_v_may_defs, x);
- if (TREE_CODE (t) != SSA_NAME)
+ /* If there is a free operand, use it. */
+ if (gimple_ssa_operands (cfun)->vop_free_buckets[bucket] != NULL)
{
- var_ann_t ann = var_ann (t);
- ann->in_v_may_def_list = 0;
+ ret = gimple_ssa_operands (cfun)->vop_free_buckets[bucket];
+ gimple_ssa_operands (cfun)->vop_free_buckets[bucket] =
+ gimple_ssa_operands (cfun)->vop_free_buckets[bucket]->next;
}
+ else
+ alloc_size = vop_free_bucket_size(bucket);
}
- opbuild_clear (&build_v_may_defs);
-}
-
-
-#define FINALIZE_OPBUILD build_vuses
-#define FINALIZE_OPBUILD_ELEM(I) opbuild_elem_virtual (&build_vuses, (I))
-#define FINALIZE_OPBUILD_BASE(I) opbuild_elem_uid (&build_vuses, (I))
-#define FINALIZE_FUNC finalize_ssa_vuse_ops
-#define FINALIZE_ALLOC alloc_vuse
-#define FINALIZE_FREE free_vuses
-#define FINALIZE_TYPE struct vuse_optype_d
-#define FINALIZE_ELEM(PTR) VUSE_OP (PTR)
-#define FINALIZE_OPS VUSE_OPS
-#define FINALIZE_USE_PTR(PTR) VUSE_OP_PTR (PTR)
-#define FINALIZE_BASE_ZERO 0
-#define FINALIZE_BASE(VAR) ((TREE_CODE (VAR) == SSA_NAME) \
- ? DECL_UID (SSA_NAME_VAR (VAR)) : DECL_UID ((VAR)))
-#define FINALIZE_BASE_TYPE unsigned
-#define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
- (PTR)->use_var = (VAL); \
- (PTR)->use_ptr.use = &((PTR)->use_var); \
- link_imm_use_stmt (&((PTR)->use_ptr), \
- (VAL), (STMT))
-#include "tree-ssa-opfinalize.h"
-
-
-/* Return a new vuse operand vector, comparing to OLD_OPS_P. */
-
-static void
-finalize_ssa_vuses (tree stmt)
-{
- unsigned num, num_v_may_defs;
- int vuse_index;
-
- /* Remove superfluous VUSE operands. If the statement already has a
- V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
- needed because V_MAY_DEFs imply a VUSE of the variable. For instance,
- suppose that variable 'a' is aliased:
+ else
+ alloc_size = num;
- # VUSE <a_2>
- # a_3 = V_MAY_DEF <a_2>
- a = a + 1;
+ if (alloc_size > 0)
+ ret = (struct voptype_d *)ssa_operand_alloc (
+ sizeof (struct voptype_d) + (alloc_size - 1) * sizeof (vuse_element_t));
- The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
- operation. */
+ VUSE_VECT_NUM_ELEM (ret->usev) = num;
+ return ret;
+}
- num = opbuild_num_elems (&build_vuses);
- num_v_may_defs = opbuild_num_elems (&build_v_may_defs);
- if (num > 0 && num_v_may_defs > 0)
- {
- int last = OPBUILD_LAST;
- vuse_index = opbuild_first (&build_vuses);
- for ( ; vuse_index != OPBUILD_LAST; )
- {
- tree vuse;
- vuse = opbuild_elem_virtual (&build_vuses, vuse_index);
- if (TREE_CODE (vuse) != SSA_NAME)
- {
- var_ann_t ann = var_ann (vuse);
- ann->in_vuse_list = 0;
- if (ann->in_v_may_def_list)
- {
- vuse_index = opbuild_remove_elem (&build_vuses, vuse_index,
- last);
- continue;
- }
- }
- last = vuse_index;
- vuse_index = opbuild_next (&build_vuses, vuse_index);
- }
- }
- else
- /* Clear out the in_list bits. */
- for (vuse_index = opbuild_first (&build_vuses);
- vuse_index != OPBUILD_LAST;
- vuse_index = opbuild_next (&build_vuses, vuse_index))
- {
- tree t = opbuild_elem_virtual (&build_vuses, vuse_index);
- if (TREE_CODE (t) != SSA_NAME)
- {
- var_ann_t ann = var_ann (t);
- ann->in_vuse_list = 0;
- }
- }
+/* This routine makes sure that PTR is in an immediate use list, and makes
+ sure the stmt pointer is set to the current stmt. */
- finalize_ssa_vuse_ops (stmt);
- /* The v_may_def build vector wasn't cleaned up because we needed it. */
- cleanup_v_may_defs ();
-
- /* Free the vuses build vector. */
- opbuild_clear (&build_vuses);
+static inline void
+set_virtual_use_link (use_operand_p ptr, tree stmt)
+{
+ /* fold_stmt may have changed the stmt pointers. */
+ if (ptr->stmt != stmt)
+ ptr->stmt = stmt;
+ /* If this use isn't in a list, add it to the correct list. */
+ if (!ptr->prev)
+ link_imm_use (ptr, *(ptr->use));
}
-
-/* Return a new v_must_def operand vector for STMT, comparing to OLD_OPS_P. */
-
-#define FINALIZE_OPBUILD build_v_must_defs
-#define FINALIZE_OPBUILD_ELEM(I) opbuild_elem_virtual (&build_v_must_defs, (I))
-#define FINALIZE_OPBUILD_BASE(I) opbuild_elem_uid (&build_v_must_defs, (I))
-#define FINALIZE_FUNC finalize_ssa_v_must_def_ops
-#define FINALIZE_ALLOC alloc_mustdef
-#define FINALIZE_FREE free_mustdefs
-#define FINALIZE_TYPE struct mustdef_optype_d
-#define FINALIZE_ELEM(PTR) MUSTDEF_RESULT (PTR)
-#define FINALIZE_OPS MUSTDEF_OPS
-#define FINALIZE_USE_PTR(PTR) MUSTDEF_KILL_PTR (PTR)
-#define FINALIZE_BASE_ZERO 0
-#define FINALIZE_BASE(VAR) ((TREE_CODE (VAR) == SSA_NAME) \
- ? DECL_UID (SSA_NAME_VAR (VAR)) : DECL_UID ((VAR)))
-#define FINALIZE_BASE_TYPE unsigned
-#define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
- (PTR)->def_var = (VAL); \
- (PTR)->kill_var = (VAL); \
- (PTR)->use_ptr.use = &((PTR)->kill_var);\
- link_imm_use_stmt (&((PTR)->use_ptr), \
- (VAL), (STMT))
-#include "tree-ssa-opfinalize.h"
-static void
-finalize_ssa_v_must_defs (tree stmt)
+/* Adds OP to the list of defs after LAST. */
+
+static inline def_optype_p
+add_def_op (tree *op, def_optype_p last)
{
- /* In the presence of subvars, there may be more than one V_MUST_DEF per
- statement (one for each subvar). It is a bit expensive to verify that
- all must-defs in a statement belong to subvars if there is more than one
- MUST-def, so we don't do it. Suffice to say, if you reach here without
- having subvars, and have num >1, you have hit a bug. */
+ def_optype_p new;
- finalize_ssa_v_must_def_ops (stmt);
- opbuild_clear (&build_v_must_defs);
+ new = alloc_def ();
+ DEF_OP_PTR (new) = op;
+ last->next = new;
+ new->next = NULL;
+ return new;
}
-/* Finalize all the build vectors, fill the new ones into INFO. */
-
-static inline void
-finalize_ssa_stmt_operands (tree stmt)
+/* Adds OP to the list of uses of statement STMT after LAST. */
+
+static inline use_optype_p
+add_use_op (tree stmt, tree *op, use_optype_p last)
{
- finalize_ssa_defs (stmt);
- finalize_ssa_uses (stmt);
- finalize_ssa_v_must_defs (stmt);
- finalize_ssa_v_may_defs (stmt);
- finalize_ssa_vuses (stmt);
+ use_optype_p new;
+
+ new = alloc_use ();
+ USE_OP_PTR (new)->use = op;
+ link_imm_use_stmt (USE_OP_PTR (new), *op, stmt);
+ last->next = new;
+ new->next = NULL;
+ return new;
}
-/* Start the process of building up operands vectors in INFO. */
+/* Return a virtual op pointer with NUM elements which are all initialized to OP
+ and are linked into the immediate uses for STMT. The new vop is appended
+ after PREV. */
-static inline void
-start_ssa_stmt_operands (void)
+static inline voptype_p
+add_vop (tree stmt, tree op, int num, voptype_p prev)
{
- gcc_assert (opbuild_num_elems (&build_defs) == 0);
- gcc_assert (opbuild_num_elems (&build_uses) == 0);
- gcc_assert (opbuild_num_elems (&build_vuses) == 0);
- gcc_assert (opbuild_num_elems (&build_v_may_defs) == 0);
- gcc_assert (opbuild_num_elems (&build_v_must_defs) == 0);
+ voptype_p new;
+ int x;
+
+ new = alloc_vop (num);
+ for (x = 0; x < num; x++)
+ {
+ VUSE_OP_PTR (new, x)->prev = NULL;
+ SET_VUSE_OP (new, x, op);
+ VUSE_OP_PTR (new, x)->use = &new->usev.uses[x].use_var;
+ link_imm_use_stmt (VUSE_OP_PTR (new, x), new->usev.uses[x].use_var, stmt);
+ }
+
+ if (prev)
+ prev->next = new;
+ new->next = NULL;
+ return new;
}
-/* Add DEF_P to the list of pointers to operands. */
+/* Adds OP to the list of vuses of statement STMT after LAST, and moves
+ LAST to the new element. */
-static inline void
-append_def (tree *def_p)
+static inline voptype_p
+add_vuse_op (tree stmt, tree op, int num, voptype_p last)
{
- opbuild_append_real (&build_defs, def_p);
+ voptype_p new = add_vop (stmt, op, num, last);
+ VDEF_RESULT (new) = NULL_TREE;
+ return new;
}
-/* Add USE_P to the list of pointers to operands. */
+/* Adds OP to the list of vdefs of statement STMT after LAST, and moves
+ LAST to the new element. */
-static inline void
-append_use (tree *use_p)
+static inline voptype_p
+add_vdef_op (tree stmt, tree op, int num, voptype_p last)
{
- opbuild_append_real (&build_uses, use_p);
+ voptype_p new = add_vop (stmt, op, num, last);
+ VDEF_RESULT (new) = op;
+ return new;
}
+
+/* Reallocate the virtual operand PTR so that it has NUM_ELEM use slots. ROOT
+ is the head of the operand list it belongs to. */
-/* Add a new virtual may def for variable VAR to the build array. */
-
-static inline void
-append_v_may_def (tree var)
+static inline struct voptype_d *
+realloc_vop (struct voptype_d *ptr, unsigned int num_elem,
+ struct voptype_d **root)
{
- if (TREE_CODE (var) != SSA_NAME)
- {
- var_ann_t ann = get_var_ann (var);
-
- /* Don't allow duplicate entries. */
- if (ann->in_v_may_def_list)
- return;
- ann->in_v_may_def_list = 1;
- }
+ unsigned int x, lim;
+ tree stmt, val;
+ struct voptype_d *ret, *tmp;
- opbuild_append_virtual (&build_v_may_defs, var);
-}
+ if (VUSE_VECT_NUM_ELEM (ptr->usev) == num_elem)
+ return ptr;
+ val = VUSE_OP (ptr, 0);
+ if (TREE_CODE (val) == SSA_NAME)
+ val = SSA_NAME_VAR (val);
-/* Add VAR to the list of virtual uses. */
+ stmt = USE_STMT (VUSE_OP_PTR (ptr, 0));
-static inline void
-append_vuse (tree var)
-{
+ /* Delink all the existing uses. */
+ for (x = 0; x < VUSE_VECT_NUM_ELEM (ptr->usev); x++)
+ {
+ use_operand_p use_p = VUSE_OP_PTR (ptr, x);
+ delink_imm_use (use_p);
+ }
- /* Don't allow duplicate entries. */
- if (TREE_CODE (var) != SSA_NAME)
+ /* If we want less space, simply use this one, and shrink the size. */
+ if (VUSE_VECT_NUM_ELEM (ptr->usev) > num_elem)
{
- var_ann_t ann = get_var_ann (var);
+ VUSE_VECT_NUM_ELEM (ptr->usev) = num_elem;
+ return ptr;
+ }
- if (ann->in_vuse_list || ann->in_v_may_def_list)
- return;
- ann->in_vuse_list = 1;
+ /* It is growing. Allocate a new one and replace the old one. */
+ ret = add_vuse_op (stmt, val, num_elem, ptr);
+
+ /* Clear PTR and add its memory to the free list. */
+ lim = VUSE_VECT_NUM_ELEM (ptr->usev);
+ memset (ptr, 0,
+ sizeof (struct voptype_d) + sizeof (vuse_element_t) * (lim - 1));
+ add_vop_to_freelist (ptr);
+
+ /* Now simply remove the old one. */
+ if (*root == ptr)
+ {
+ *root = ret;
+ return ret;
}
+ else
+ {
+ /* Walk the list to find PTR's predecessor, then splice RET in
+ after it. */
+ for (tmp = *root;
+ tmp != NULL && tmp->next != ptr;
+ tmp = tmp->next)
+ ;
+
+ if (tmp != NULL)
+ {
+ tmp->next = ret;
+ return ret;
+ }
+ }
- opbuild_append_virtual (&build_vuses, var);
+ /* The pointer passed in isn't in STMT's VDEF lists. */
+ gcc_unreachable ();
}
+
+/* Reallocate the PTR vdef so that it has NUM_ELEM use slots. */
-/* Add VAR to the list of virtual must definitions for INFO. */
-
-static inline void
-append_v_must_def (tree var)
+struct voptype_d *
+realloc_vdef (struct voptype_d *ptr, unsigned int num_elem)
{
- unsigned i;
-
- /* Don't allow duplicate entries. */
- for (i = 0; i < opbuild_num_elems (&build_v_must_defs); i++)
- if (var == opbuild_elem_virtual (&build_v_must_defs, i))
- return;
+ tree val, stmt;
+ struct voptype_d *ret;
- opbuild_append_virtual (&build_v_must_defs, var);
+ val = VDEF_RESULT (ptr);
+ stmt = USE_STMT (VDEF_OP_PTR (ptr, 0));
+ ret = realloc_vop (ptr, num_elem, &(VDEF_OPS (stmt)));
+ VDEF_RESULT (ret) = val;
+ return ret;
}
+
+/* Reallocate the PTR vuse so that it has NUM_ELEM use slots. */
-/* Parse STMT looking for operands. OLD_OPS is the original stmt operand
- cache for STMT, if it existed before. When finished, the various build_*
- operand vectors will have potential operands. in them. */
-
-static void
-parse_ssa_operands (tree stmt)
+struct voptype_d *
+realloc_vuse (struct voptype_d *ptr, unsigned int num_elem)
{
- enum tree_code code;
+ tree stmt;
+ struct voptype_d *ret;
- code = TREE_CODE (stmt);
- switch (code)
- {
- case MODIFY_EXPR:
- /* First get operands from the RHS. For the LHS, we use a V_MAY_DEF if
- either only part of LHS is modified or if the RHS might throw,
- otherwise, use V_MUST_DEF.
+ stmt = USE_STMT (VUSE_OP_PTR (ptr, 0));
+ ret = realloc_vop (ptr, num_elem, &(VUSE_OPS (stmt)));
+ return ret;
+}
- ??? If it might throw, we should represent somehow that it is killed
- on the fallthrough path. */
- {
- tree lhs = TREE_OPERAND (stmt, 0);
- int lhs_flags = opf_is_def;
- get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);
+/* Takes elements from build_defs and turns them into def operands of STMT.
+ TODO -- Make build_defs VEC of tree *. */
- /* If the LHS is a VIEW_CONVERT_EXPR, it isn't changing whether
- or not the entire LHS is modified; that depends on what's
- inside the VIEW_CONVERT_EXPR. */
- if (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
- lhs = TREE_OPERAND (lhs, 0);
+static inline void
+finalize_ssa_defs (tree stmt)
+{
+ unsigned new_i;
+ struct def_optype_d new_list;
+ def_optype_p old_ops, last;
+ unsigned int num = VEC_length (tree, build_defs);
- if (TREE_CODE (lhs) != ARRAY_REF
- && TREE_CODE (lhs) != ARRAY_RANGE_REF
- && TREE_CODE (lhs) != BIT_FIELD_REF
- && TREE_CODE (lhs) != REALPART_EXPR
- && TREE_CODE (lhs) != IMAGPART_EXPR)
- lhs_flags |= opf_kill_def;
+ /* There should only be a single real definition per assignment. */
+ gcc_assert ((stmt && TREE_CODE (stmt) != GIMPLE_MODIFY_STMT) || num <= 1);
- get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), lhs_flags);
- }
- break;
+ new_list.next = NULL;
+ last = &new_list;
- case COND_EXPR:
- get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
- break;
+ old_ops = DEF_OPS (stmt);
- case SWITCH_EXPR:
- get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
- break;
+ new_i = 0;
- case ASM_EXPR:
- get_asm_expr_operands (stmt);
- break;
+ /* Check for the common case of 1 def that hasn't changed. */
+ if (old_ops && old_ops->next == NULL && num == 1
+ && (tree *) VEC_index (tree, build_defs, 0) == DEF_OP_PTR (old_ops))
+ return;
+
+ /* If there is anything in the old list, free it. */
+ if (old_ops)
+ {
+ old_ops->next = gimple_ssa_operands (cfun)->free_defs;
+ gimple_ssa_operands (cfun)->free_defs = old_ops;
+ }
+
+ /* If there is anything remaining in the build_defs list, simply emit it. */
+ for ( ; new_i < num; new_i++)
+ last = add_def_op ((tree *) VEC_index (tree, build_defs, new_i), last);
+
+ /* Now set the stmt's operands. */
+ DEF_OPS (stmt) = new_list.next;
+
+#ifdef ENABLE_CHECKING
+ {
+ def_optype_p ptr;
+ unsigned x = 0;
+ for (ptr = DEF_OPS (stmt); ptr; ptr = ptr->next)
+ x++;
+
+ gcc_assert (x == num);
+ }
+#endif
+}
+
+
+/* Takes elements from build_uses and turns them into use operands of STMT.
+ TODO -- Make build_uses VEC of tree *. */
+
+static inline void
+finalize_ssa_uses (tree stmt)
+{
+ unsigned new_i;
+ struct use_optype_d new_list;
+ use_optype_p old_ops, ptr, last;
+
+#ifdef ENABLE_CHECKING
+ {
+ unsigned x;
+ unsigned num = VEC_length (tree, build_uses);
+
+ /* If the pointer to the operand is the statement itself, something is
+ wrong. It means that we are pointing to a local variable (the
+ initial call to update_stmt_operands does not pass a pointer to a
+ statement). */
+ for (x = 0; x < num; x++)
+ gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
+ }
+#endif
+
+ new_list.next = NULL;
+ last = &new_list;
+
+ old_ops = USE_OPS (stmt);
+
+ /* If there is anything in the old list, free it. */
+ if (old_ops)
+ {
+ for (ptr = old_ops; ptr; ptr = ptr->next)
+ delink_imm_use (USE_OP_PTR (ptr));
+ old_ops->next = gimple_ssa_operands (cfun)->free_uses;
+ gimple_ssa_operands (cfun)->free_uses = old_ops;
+ }
+
+ /* Now create nodes for all the new nodes. */
+ for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
+ last = add_use_op (stmt,
+ (tree *) VEC_index (tree, build_uses, new_i),
+ last);
+
+ /* Now set the stmt's operands. */
+ USE_OPS (stmt) = new_list.next;
+
+#ifdef ENABLE_CHECKING
+ {
+ unsigned x = 0;
+ for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
+ x++;
+
+ gcc_assert (x == VEC_length (tree, build_uses));
+ }
+#endif
+}
+
+
+/* Takes elements from BUILD_VDEFS and turns them into vdef operands of
+ STMT. FIXME, for now VDEF operators should have a single operand
+ on their RHS. */
+
+static inline void
+finalize_ssa_vdefs (tree stmt)
+{
+ unsigned new_i;
+ struct voptype_d new_list;
+ voptype_p old_ops, ptr, last;
+ stmt_ann_t ann = stmt_ann (stmt);
+
+ /* Set the symbols referenced by STMT. */
+ if (!bitmap_empty_p (build_stores))
+ {
+ if (ann->operands.stores == NULL)
+ ann->operands.stores = BITMAP_ALLOC (&operands_bitmap_obstack);
+
+ bitmap_copy (ann->operands.stores, build_stores);
+ }
+ else
+ BITMAP_FREE (ann->operands.stores);
+
+ /* If aliases have not been computed, do not instantiate a virtual
+ operator on STMT. Initially, we only compute the SSA form on
+ GIMPLE registers. The virtual SSA form is only computed after
+ alias analysis, so virtual operators will remain unrenamed and
+ the verifier will complain. However, alias analysis needs to
+ access symbol load/store information, so we need to compute
+ those. */
+ if (!gimple_aliases_computed_p (cfun))
+ return;
+
+ new_list.next = NULL;
+ last = &new_list;
+
+ old_ops = VDEF_OPS (stmt);
+ new_i = 0;
+ while (old_ops && new_i < VEC_length (tree, build_vdefs))
+ {
+ tree op = VEC_index (tree, build_vdefs, new_i);
+ unsigned new_uid = get_name_decl (op);
+ unsigned old_uid = get_name_decl (VDEF_RESULT (old_ops));
+
+ /* FIXME, for now each VDEF operator should have at most one
+ operand on its RHS. */
+ gcc_assert (VDEF_NUM (old_ops) == 1);
+
+ if (old_uid == new_uid)
+ {
+ /* If the symbols are the same, reuse the existing operand. */
+ last->next = old_ops;
+ last = old_ops;
+ old_ops = old_ops->next;
+ last->next = NULL;
+ set_virtual_use_link (VDEF_OP_PTR (last, 0), stmt);
+ new_i++;
+ }
+ else if (old_uid < new_uid)
+ {
+ /* If old is less than new, old goes to the free list. */
+ voptype_p next;
+ delink_imm_use (VDEF_OP_PTR (old_ops, 0));
+ next = old_ops->next;
+ add_vop_to_freelist (old_ops);
+ old_ops = next;
+ }
+ else
+ {
+ /* This is a new operand. */
+ last = add_vdef_op (stmt, op, 1, last);
+ new_i++;
+ }
+ }
+
+ /* If there is anything remaining in BUILD_VDEFS, simply emit it. */
+ for ( ; new_i < VEC_length (tree, build_vdefs); new_i++)
+ last = add_vdef_op (stmt, VEC_index (tree, build_vdefs, new_i), 1, last);
+
+ /* If there is anything in the old list, free it. */
+ if (old_ops)
+ {
+ for (ptr = old_ops; ptr; ptr = last)
+ {
+ last = ptr->next;
+ delink_imm_use (VDEF_OP_PTR (ptr, 0));
+ add_vop_to_freelist (ptr);
+ }
+ }
+
+ /* Now set STMT's operands. */
+ VDEF_OPS (stmt) = new_list.next;
+
+#ifdef ENABLE_CHECKING
+ {
+ unsigned x = 0;
+ for (ptr = VDEF_OPS (stmt); ptr; ptr = ptr->next)
+ x++;
+
+ gcc_assert (x == VEC_length (tree, build_vdefs));
+ }
+#endif
+}
+
+
+/* Takes elements from BUILD_VUSES and turns them into VUSE operands of
+ STMT. */
+
+static inline void
+finalize_ssa_vuse_ops (tree stmt)
+{
+ unsigned new_i, old_i;
+ voptype_p old_ops, last;
+ VEC(tree,heap) *new_ops;
+ stmt_ann_t ann;
+
+ /* Set the symbols referenced by STMT. */
+ ann = stmt_ann (stmt);
+ if (!bitmap_empty_p (build_loads))
+ {
+ if (ann->operands.loads == NULL)
+ ann->operands.loads = BITMAP_ALLOC (&operands_bitmap_obstack);
+
+ bitmap_copy (ann->operands.loads, build_loads);
+ }
+ else
+ BITMAP_FREE (ann->operands.loads);
+
+ /* If aliases have not been computed, do not instantiate a virtual
+ operator on STMT. Initially, we only compute the SSA form on
+ GIMPLE registers. The virtual SSA form is only computed after
+ alias analysis, so virtual operators will remain unrenamed and
+ the verifier will complain. However, alias analysis needs to
+ access symbol load/store information, so we need to compute
+ those. */
+ if (!gimple_aliases_computed_p (cfun))
+ return;
+
+ /* STMT should have at most one VUSE operator. */
+ old_ops = VUSE_OPS (stmt);
+ gcc_assert (old_ops == NULL || old_ops->next == NULL);
+
+ new_ops = NULL;
+ new_i = old_i = 0;
+ while (old_ops
+ && old_i < VUSE_NUM (old_ops)
+ && new_i < VEC_length (tree, build_vuses))
+ {
+ tree new_op = VEC_index (tree, build_vuses, new_i);
+ tree old_op = VUSE_OP (old_ops, old_i);
+ unsigned new_uid = get_name_decl (new_op);
+ unsigned old_uid = get_name_decl (old_op);
+
+ if (old_uid == new_uid)
+ {
+ /* If the symbols are the same, reuse the existing operand. */
+ VEC_safe_push (tree, heap, new_ops, old_op);
+ new_i++;
+ old_i++;
+ }
+ else if (old_uid < new_uid)
+ {
+ /* If OLD_UID is less than NEW_UID, the old operand has
+ disappeared, skip to the next old operand. */
+ old_i++;
+ }
+ else
+ {
+ /* This is a new operand. */
+ VEC_safe_push (tree, heap, new_ops, new_op);
+ new_i++;
+ }
+ }
+
+ /* If there is anything remaining in the build_vuses list, simply emit it. */
+ for ( ; new_i < VEC_length (tree, build_vuses); new_i++)
+ VEC_safe_push (tree, heap, new_ops, VEC_index (tree, build_vuses, new_i));
+
+ /* If there is anything in the old list, free it. */
+ if (old_ops)
+ {
+ for (old_i = 0; old_i < VUSE_NUM (old_ops); old_i++)
+ delink_imm_use (VUSE_OP_PTR (old_ops, old_i));
+ add_vop_to_freelist (old_ops);
+ VUSE_OPS (stmt) = NULL;
+ }
+
+ /* If there are any operands, instantiate a VUSE operator for STMT. */
+ if (new_ops)
+ {
+ tree op;
+ unsigned i;
+
+ last = add_vuse_op (stmt, NULL, VEC_length (tree, new_ops), NULL);
+
+ for (i = 0; VEC_iterate (tree, new_ops, i, op); i++)
+ SET_USE (VUSE_OP_PTR (last, (int) i), op);
+
+ VUSE_OPS (stmt) = last;
+ }
+
+#ifdef ENABLE_CHECKING
+ {
+ unsigned x;
+
+ if (VUSE_OPS (stmt))
+ {
+ gcc_assert (VUSE_OPS (stmt)->next == NULL);
+ x = VUSE_NUM (VUSE_OPS (stmt));
+ }
+ else
+ x = 0;
+
+ gcc_assert (x == VEC_length (tree, build_vuses));
+ }
+#endif
+}
+
+/* Finalize the VUSE operands for STMT, first removing any VUSE that
+ is already implied by a VDEF of the same symbol. */
+
+static void
+finalize_ssa_vuses (tree stmt)
+{
+ unsigned num, num_vdefs;
+ unsigned vuse_index;
+
+ /* Remove superfluous VUSE operands. If the statement already has a
+ VDEF operator for a variable 'a', then a VUSE for 'a' is not
+ needed because VDEFs imply a VUSE of the variable. For instance,
+ suppose that variable 'a' is pointed-to by p and q:
+
+ # VUSE <a_2>
+ # a_3 = VDEF <a_2>
+ *p = *q;
+
+ The VUSE <a_2> is superfluous because it is implied by the
+ VDEF operator. */
+ num = VEC_length (tree, build_vuses);
+ num_vdefs = VEC_length (tree, build_vdefs);
+
+ if (num > 0 && num_vdefs > 0)
+ for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
+ {
+ tree vuse;
+ vuse = VEC_index (tree, build_vuses, vuse_index);
+ if (TREE_CODE (vuse) != SSA_NAME)
+ {
+ var_ann_t ann = var_ann (vuse);
+ ann->in_vuse_list = 0;
+ if (ann->in_vdef_list)
+ {
+ VEC_ordered_remove (tree, build_vuses, vuse_index);
+ continue;
+ }
+ }
+ vuse_index++;
+ }
+
+ finalize_ssa_vuse_ops (stmt);
+}
+
+
+/* Clear the in_list bits and empty the build array for VDEFs and
+ VUSEs. */
+
+static inline void
+cleanup_build_arrays (void)
+{
+ unsigned i;
+ tree t;
+
+ for (i = 0; VEC_iterate (tree, build_vdefs, i, t); i++)
+ if (TREE_CODE (t) != SSA_NAME)
+ var_ann (t)->in_vdef_list = false;
+
+ for (i = 0; VEC_iterate (tree, build_vuses, i, t); i++)
+ if (TREE_CODE (t) != SSA_NAME)
+ var_ann (t)->in_vuse_list = false;
+
+ VEC_truncate (tree, build_vdefs, 0);
+ VEC_truncate (tree, build_vuses, 0);
+ VEC_truncate (tree, build_defs, 0);
+ VEC_truncate (tree, build_uses, 0);
+ bitmap_clear (build_loads);
+ bitmap_clear (build_stores);
+}
+
+
+/* Finalize all the build vectors, filling the new operands into STMT. */
+
+static inline void
+finalize_ssa_stmt_operands (tree stmt)
+{
+ finalize_ssa_defs (stmt);
+ finalize_ssa_uses (stmt);
+ finalize_ssa_vdefs (stmt);
+ finalize_ssa_vuses (stmt);
+ cleanup_build_arrays ();
+}
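+
+/* Note the ordering above: finalize_ssa_vuses consults the in_vdef_list
+ bits that parsing set via append_vdef, so it must run before
+ cleanup_build_arrays clears them. */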
+
+
+/* Start the process of building up operand vectors; verify that all
+ the build_* vectors and bitmaps are empty. */
+
+static inline void
+start_ssa_stmt_operands (void)
+{
+ gcc_assert (VEC_length (tree, build_defs) == 0);
+ gcc_assert (VEC_length (tree, build_uses) == 0);
+ gcc_assert (VEC_length (tree, build_vuses) == 0);
+ gcc_assert (VEC_length (tree, build_vdefs) == 0);
+ gcc_assert (bitmap_empty_p (build_loads));
+ gcc_assert (bitmap_empty_p (build_stores));
+}
+
+
+/* Add DEF_P to the list of pointers to operands. */
+
+static inline void
+append_def (tree *def_p)
+{
+ VEC_safe_push (tree, heap, build_defs, (tree) def_p);
+}
+
+
+/* Add USE_P to the list of pointers to operands. */
+
+static inline void
+append_use (tree *use_p)
+{
+ VEC_safe_push (tree, heap, build_uses, (tree) use_p);
+}
+
+
+/* Add VAR to the set of variables that require a VDEF operator. */
+
+static inline void
+append_vdef (tree var)
+{
+ tree sym;
+
+ if (TREE_CODE (var) != SSA_NAME)
+ {
+ tree mpt;
+ var_ann_t ann;
+
+ /* If VAR belongs to a memory partition, use it instead of VAR. */
+ mpt = memory_partition (var);
+ if (mpt)
+ var = mpt;
+
+ /* Don't allow duplicate entries. */
+ ann = get_var_ann (var);
+ if (ann->in_vdef_list)
+ return;
+
+ ann->in_vdef_list = true;
+ sym = var;
+ }
+ else
+ sym = SSA_NAME_VAR (var);
+
+ VEC_safe_push (tree, heap, build_vdefs, var);
+ bitmap_set_bit (build_stores, DECL_UID (sym));
+}
+
+
+/* Add VAR to the set of variables that require a VUSE operator. */
+
+static inline void
+append_vuse (tree var)
+{
+ tree sym;
+
+ if (TREE_CODE (var) != SSA_NAME)
+ {
+ tree mpt;
+ var_ann_t ann;
+
+ /* If VAR belongs to a memory partition, use it instead of VAR. */
+ mpt = memory_partition (var);
+ if (mpt)
+ var = mpt;
+
+ /* Don't allow duplicate entries. */
+ ann = get_var_ann (var);
+ if (ann->in_vuse_list || ann->in_vdef_list)
+ return;
+
+ ann->in_vuse_list = true;
+ sym = var;
+ }
+ else
+ sym = SSA_NAME_VAR (var);
+
+ VEC_safe_push (tree, heap, build_vuses, var);
+ bitmap_set_bit (build_loads, DECL_UID (sym));
+}
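+
+/* For example, if symbol 'a' is in memory partition MPT.5 (a name used
+ here purely for illustration), a load of 'a' appends a VUSE of MPT.5
+ and sets MPT.5's DECL_UID in build_loads, so loads from the same
+ partition collapse into a single virtual operand. */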
+
+
+/* REF is a tree that contains the entire pointer dereference
+ expression, if available, or NULL otherwise. ALIAS is the variable
+ we are asking whether REF can access. OFFSET and SIZE come from the
+ memory access expression that generated this virtual operand. */
+
+static bool
+access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
+ HOST_WIDE_INT size)
+{
+ bool offsetgtz = offset > 0;
+ unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
+ tree base = ref ? get_base_address (ref) : NULL;
+
+ /* If ALIAS is .GLOBAL_VAR then the memory reference REF must be
+ using a call-clobbered memory tag. By definition, call-clobbered
+ memory tags can always touch .GLOBAL_VAR. */
+ if (alias == gimple_global_var (cfun))
+ return true;
+
+ /* If ALIAS is an SFT, it can't be touched if the offset
+ and size of the access do not overlap with the SFT's offset and
+ size. This is only true if we are accessing through a pointer
+ to a type that is the same as SFT_PARENT_VAR. Otherwise, we may
+ be accessing through a pointer to some substruct of the
+ structure, and if we try to prune there, we will have the wrong
+ offset, and get the wrong answer.
+ i.e., we can't prune without more work if we have something like
+
+ struct gcc_target
+ {
+ struct asm_out
+ {
+ const char *byte_op;
+ struct asm_int_op
+ {
+ const char *hi;
+ } aligned_op;
+ } asm_out;
+ } targetm;
+
+ foo = &targetm.asm_out.aligned_op;
+ return foo->hi;
+
+ SFT.1, which represents hi, will have SFT_OFFSET=32 because in
+ terms of SFT_PARENT_VAR, that is where it is.
+ However, the access through the foo pointer will be at offset 0. */
+ if (size != -1
+ && TREE_CODE (alias) == STRUCT_FIELD_TAG
+ && base
+ && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias))
+ && !overlap_subvar (offset, size, alias, NULL))
+ {
+#ifdef ACCESS_DEBUGGING
+ fprintf (stderr, "Access to ");
+ print_generic_expr (stderr, ref, 0);
+ fprintf (stderr, " may not touch ");
+ print_generic_expr (stderr, alias, 0);
+ fprintf (stderr, " in function %s\n", get_name (current_function_decl));
+#endif
+ return false;
+ }
+
+ /* With strict aliasing, it is impossible for a component access
+ through a pointer to touch a random variable, unless that
+ variable *is* a structure or a pointer.
+
+ That is, given p->c, and some random global variable b,
+ there is no legal way that p->c could be an access to b.
+
+ Without strict aliasing on, we consider it legal to do something
+ like:
+
+ struct foos { int l; };
+ int foo;
+ static struct foos *getfoo(void);
+ int main (void)
+ {
+ struct foos *f = getfoo();
+ f->l = 1;
+ foo = 2;
+ if (f->l == 1)
+ abort();
+ exit(0);
+ }
+ static struct foos *getfoo(void)
+ { return (struct foos *)&foo; }
+
+ (taken from 20000623-1.c)
+
+ The docs also say/imply that access through union pointers
+ is legal (but *not* if you take the address of the union member,
+ i.e. the inverse), such that you can do
+
+ typedef union {
+ int d;
+ } U;
+
+ int rv;
+ void breakme()
+ {
+ U *rv0;
+ U *pretmp = (U*)&rv;
+ rv0 = pretmp;
+ rv0->d = 42;
+ }
+ To implement this, we just punt on accesses through union
+ pointers entirely.
+ */
+ else if (ref
+ && flag_strict_aliasing
+ && TREE_CODE (ref) != INDIRECT_REF
+ && !MTAG_P (alias)
+ && (TREE_CODE (base) != INDIRECT_REF
+ || TREE_CODE (TREE_TYPE (base)) != UNION_TYPE)
+ && !AGGREGATE_TYPE_P (TREE_TYPE (alias))
+ && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE
+ && !var_ann (alias)->is_heapvar
+ /* If the struct has the may_alias attribute (alias set zero),
+ we cannot prune the access and must return true. */
+ && get_alias_set (base))
+ {
+#ifdef ACCESS_DEBUGGING
+ fprintf (stderr, "Access to ");
+ print_generic_expr (stderr, ref, 0);
+ fprintf (stderr, " may not touch ");
+ print_generic_expr (stderr, alias, 0);
+ fprintf (stderr, " in function %s\n", get_name (current_function_decl));
+#endif
+ return false;
+ }
+
+ /* If the offset of the access is greater than the size of one of
+ the possible aliases, it can't be touching that alias, because it
+ would be past the end of the structure. */
+ else if (ref
+ && flag_strict_aliasing
+ && TREE_CODE (ref) != INDIRECT_REF
+ && !MTAG_P (alias)
+ && !POINTER_TYPE_P (TREE_TYPE (alias))
+ && offsetgtz
+ && DECL_SIZE (alias)
+ && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
+ && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias)))
+ {
+#ifdef ACCESS_DEBUGGING
+ fprintf (stderr, "Access to ");
+ print_generic_expr (stderr, ref, 0);
+ fprintf (stderr, " may not touch ");
+ print_generic_expr (stderr, alias, 0);
+ fprintf (stderr, " in function %s\n", get_name (current_function_decl));
+#endif
+ return false;
+ }
+
+ return true;
+}
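+
+/* As a hypothetical example of the last pruning case above, given
+
+ struct S { int i; } s;
+ char d;
+
+ an access whose OFFSET is 64 bits can never touch 'd', whose
+ DECL_SIZE is only 8 bits, so a virtual operand for 'd' would be
+ pruned. */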
+
+
+/* Add VAR to the virtual operands array. FLAGS is as in
+ get_expr_operands. FULL_REF is a tree that contains the entire
+ pointer dereference expression, if available, or NULL otherwise.
+ OFFSET and SIZE come from the memory access expression that
+ generated this virtual operand. IS_CALL_SITE is true if the
+ affected statement is a call site. */
+
+static void
+add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
+ tree full_ref, HOST_WIDE_INT offset,
+ HOST_WIDE_INT size, bool is_call_site)
+{
+ bitmap aliases = NULL;
+ tree sym;
+ var_ann_t v_ann;
+
+ sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
+ v_ann = var_ann (sym);
+
+ /* Mark the statement as having memory operands. */
+ s_ann->references_memory = true;
+
+ /* Mark statements with volatile operands. Optimizers should back
+ off from statements having volatile operands. */
+ if (TREE_THIS_VOLATILE (sym))
+ s_ann->has_volatile_ops = true;
+
+ /* If the variable cannot be modified and this is a VDEF, change
+ it into a VUSE. This happens when read-only variables are marked
+ call-clobbered and/or aliased to writable variables. Note that
+ we only suppress the VDEF for implicit (non-specific) stores.
+
+ Note that if this is a specific store, i.e. associated with a
+ GIMPLE_MODIFY_STMT, then we can't suppress the VDEF, lest we run
+ into validation problems.
+
+ This can happen when programs cast away const, leaving us with a
+ store to read-only memory. If the statement is actually executed
+ at runtime, then the program is ill formed. If the statement is
+ not executed then all is well. At the very least, we cannot ICE. */
+ if ((flags & opf_implicit) && unmodifiable_var_p (var))
+ flags &= ~opf_def;
+
+ /* The variable is not a GIMPLE register. Add it (or its aliases) to
+ virtual operands, unless the caller has specifically requested
+ not to add virtual operands (used when adding operands inside an
+ ADDR_EXPR expression). */
+ if (flags & opf_no_vops)
+ return;
+
+ if (MTAG_P (var))
+ aliases = MTAG_ALIASES (var);
+
+ if (aliases == NULL)
+ {
+ if (s_ann && !gimple_aliases_computed_p (cfun))
+ s_ann->has_volatile_ops = true;
+
+ /* The variable is not aliased or it is an alias tag. */
+ if (flags & opf_def)
+ append_vdef (var);
+ else
+ append_vuse (var);
+ }
+ else
+ {
+ bitmap_iterator bi;
+ unsigned int i;
+ tree al;
+
+ /* The variable is aliased. Add its aliases to the virtual
+ operands. */
+ gcc_assert (!bitmap_empty_p (aliases));
+
+ if (flags & opf_def)
+ {
+ bool none_added = true;
+ EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
+ {
+ al = referenced_var (i);
+ if (!access_can_touch_variable (full_ref, al, offset, size))
+ continue;
+
+ /* Call-clobbered tags may have non-call-clobbered
+ symbols in their alias sets. Ignore them if we are
+ adding VOPs for a call site. */
+ if (is_call_site && !is_call_clobbered (al))
+ continue;
+
+ none_added = false;
+ append_vdef (al);
+ }
+
+ /* If the variable is also an alias tag, add a virtual
+ operand for it, otherwise we will miss representing
+ references to the members of the variable's alias set.
+ This fixes the bug in gcc.c-torture/execute/20020503-1.c.
+
+ It is also necessary to add bare defs on clobbers for
+ SMT's, so that bare SMT uses caused by pruning all the
+ aliases will link up properly with calls. In order to
+ keep the number of these bare defs we add down to the
+ minimum necessary, we keep track of which SMT's were used
+ alone in statement vdefs or VUSEs. */
+ if (none_added
+ || (TREE_CODE (var) == SYMBOL_MEMORY_TAG
+ && is_call_site))
+ {
+ append_vdef (var);
+ }
+ }
+ else
+ {
+ bool none_added = true;
+ EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
+ {
+ al = referenced_var (i);
+ if (!access_can_touch_variable (full_ref, al, offset, size))
+ continue;
+
+ /* Call-clobbered tags may have non-call-clobbered
+ symbols in their alias sets. Ignore them if we are
+ adding VOPs for a call site. */
+ if (is_call_site && !is_call_clobbered (al))
+ continue;
+
+ none_added = false;
+ append_vuse (al);
+ }
+
+ /* Even if no aliases have been added, we still need to
+ establish def-use and use-def chains, lest
+ transformations think that this is not a memory
+ reference. For an example of this scenario, see
+ testsuite/g++.dg/opt/cleanup1.C. */
+ if (none_added)
+ append_vuse (var);
+ }
+ }
+}
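+
+/* For instance (hypothetical), for a store '*p = 1' whose symbol
+ memory tag SMT.4 has the alias set { a, b }, this function adds a
+ VDEF for 'a' and a VDEF for 'b', skipping any alias rejected by
+ access_can_touch_variable; if every alias is pruned, a bare VDEF
+ of SMT.4 itself is added instead. */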
+
+
+/* Add *VAR_P to the appropriate operand array for S_ANN. FLAGS is as in
+ get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
+ the statement's real operands, otherwise it is added to virtual
+ operands. */
+
+static void
+add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
+{
+ tree var, sym;
+ var_ann_t v_ann;
+
+ gcc_assert (SSA_VAR_P (*var_p) && s_ann);
+
+ var = *var_p;
+ sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
+ v_ann = var_ann (sym);
+
+ /* Mark statements with volatile operands. */
+ if (TREE_THIS_VOLATILE (sym))
+ s_ann->has_volatile_ops = true;
+
+ if (is_gimple_reg (sym))
+ {
+ /* The variable is a GIMPLE register. Add it to real operands. */
+ if (flags & opf_def)
+ append_def (var_p);
+ else
+ append_use (var_p);
+ }
+ else
+ add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false);
+}
+
+
+/* A subroutine of get_expr_operands to handle INDIRECT_REF,
+ ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.
+
+ STMT is the statement being processed, EXPR is the INDIRECT_REF
+ that got us here.
+
+ FLAGS is as in get_expr_operands.
+
+ FULL_REF contains the full pointer dereference expression, if we
+ have it, or NULL otherwise.
+
+ OFFSET and SIZE are the location of the access inside the
+ dereferenced pointer, if known.
+
+ RECURSE_ON_BASE should be set to true if we want to continue
+ calling get_expr_operands on the base pointer, and false if
+ something else will do it for us. */
+
+static void
+get_indirect_ref_operands (tree stmt, tree expr, int flags,
+ tree full_ref,
+ HOST_WIDE_INT offset, HOST_WIDE_INT size,
+ bool recurse_on_base)
+{
+ tree *pptr = &TREE_OPERAND (expr, 0);
+ tree ptr = *pptr;
+ stmt_ann_t s_ann = stmt_ann (stmt);
+
+ s_ann->references_memory = true;
+ if (TREE_THIS_VOLATILE (expr))
+ s_ann->has_volatile_ops = true;
+
+ if (SSA_VAR_P (ptr))
+ {
+ struct ptr_info_def *pi = NULL;
+
+ /* If PTR has flow-sensitive points-to information, use it. */
+ if (TREE_CODE (ptr) == SSA_NAME
+ && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
+ && pi->name_mem_tag)
+ {
+ /* PTR has its own memory tag. Use it. */
+ add_virtual_operand (pi->name_mem_tag, s_ann, flags,
+ full_ref, offset, size, false);
+ }
+ else
+ {
+ /* If PTR is not an SSA_NAME or it doesn't have a name
+ tag, use its symbol memory tag. */
+ var_ann_t v_ann;
+
+ /* If we are emitting debugging dumps, display a warning if
+ PTR is an SSA_NAME with no flow-sensitive alias
+ information. That means that we may need to compute
+ aliasing again. */
+ if (dump_file
+ && TREE_CODE (ptr) == SSA_NAME
+ && pi == NULL)
+ {
+ fprintf (dump_file,
+ "NOTE: no flow-sensitive alias info for ");
+ print_generic_expr (dump_file, ptr, dump_flags);
+ fprintf (dump_file, " in ");
+ print_generic_stmt (dump_file, stmt, dump_flags);
+ }
+
+ if (TREE_CODE (ptr) == SSA_NAME)
+ ptr = SSA_NAME_VAR (ptr);
+ v_ann = var_ann (ptr);
+
+ if (v_ann->symbol_mem_tag)
+ add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
+ full_ref, offset, size, false);
+
+ /* Aliasing information is missing; mark the statement as
+ volatile so that we do not optimize it too aggressively. */
+ else if (s_ann
+ && !gimple_aliases_computed_p (cfun)
+ && (flags & opf_def))
+ s_ann->has_volatile_ops = true;
+ }
+ }
+ else if (TREE_CODE (ptr) == INTEGER_CST)
+ {
+ /* If a constant is used as a pointer, we can't generate a real
+ operand for it but we mark the statement volatile to prevent
+ optimizations from messing things up. */
+ if (s_ann)
+ s_ann->has_volatile_ops = true;
+ return;
+ }
+ else
+ {
+ /* Ok, this isn't even is_gimple_min_invariant. Something's broken. */
+ gcc_unreachable ();
+ }
+
+ /* If requested, add a USE operand for the base pointer. */
+ if (recurse_on_base)
+ get_expr_operands (stmt, pptr, opf_use);
+}
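+
+/* For instance (hypothetical), for '*p_1 = x_2' where points-to
+ analysis gave p_1 the name tag NMT.5, the virtual operands are
+ added through NMT.5; without flow-sensitive information, the
+ symbol memory tag of 'p' is used instead. In both cases a real
+ USE of p_1 is added when RECURSE_ON_BASE is true. */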
- case RETURN_EXPR:
- get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
- break;
- case GOTO_EXPR:
- get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
- break;
+/* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
- case LABEL_EXPR:
- get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
- break;
+static void
+get_tmr_operands (tree stmt, tree expr, int flags)
+{
+ tree tag, ref;
+ HOST_WIDE_INT offset, size, maxsize;
+ subvar_t svars, sv;
+ stmt_ann_t s_ann = stmt_ann (stmt);
- /* These nodes contain no variable references. */
- case BIND_EXPR:
- case CASE_LABEL_EXPR:
- case TRY_CATCH_EXPR:
- case TRY_FINALLY_EXPR:
- case EH_FILTER_EXPR:
- case CATCH_EXPR:
- case RESX_EXPR:
- break;
+ /* This statement references memory. */
+ s_ann->references_memory = true;
- default:
- /* Notice that if get_expr_operands tries to use &STMT as the operand
- pointer (which may only happen for USE operands), we will fail in
- append_use. This default will handle statements like empty
- statements, or CALL_EXPRs that may appear on the RHS of a statement
- or as statements themselves. */
- get_expr_operands (stmt, &stmt, opf_none);
- break;
+ /* First record the real operands. */
+ get_expr_operands (stmt, &TMR_BASE (expr), opf_use);
+ get_expr_operands (stmt, &TMR_INDEX (expr), opf_use);
+
+ if (TMR_SYMBOL (expr))
+ add_to_addressable_set (TMR_SYMBOL (expr), &s_ann->addresses_taken);
+
+ tag = TMR_TAG (expr);
+ if (!tag)
+ {
+ /* Something weird, so ensure that we will be careful. */
+ s_ann->has_volatile_ops = true;
+ return;
+ }
+
+ if (DECL_P (tag))
+ {
+ get_expr_operands (stmt, &tag, flags);
+ return;
}
-}
-/* Create an operands cache for STMT, returning it in NEW_OPS. OLD_OPS are the
- original operands, and if ANN is non-null, appropriate stmt flags are set
- in the stmt's annotation. If ANN is NULL, this is not considered a "real"
- stmt, and none of the operands will be entered into their respective
- immediate uses tables. This is to allow stmts to be processed when they
- are not actually in the CFG.
+ ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
+ gcc_assert (ref != NULL_TREE);
+ svars = get_subvars_for_var (ref);
+ for (sv = svars; sv; sv = sv->next)
+ {
+ bool exact;
- Note that some fields in old_ops may change to NULL, although none of the
- memory they originally pointed to will be destroyed. It is appropriate
- to call free_stmt_operands() on the value returned in old_ops.
+ if (overlap_subvar (offset, maxsize, sv->var, &exact))
+ add_stmt_operand (&sv->var, s_ann, flags);
+ }
+}
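+
+/* For instance (a hypothetical illustration), a TARGET_MEM_REF such
+ as MEM[symbol: a, index: i_2, step: 4] for an access like a[i]
+ adds a real USE of i_2, notes that the address of 'a' is taken,
+ and adds virtual operands for the subvariables of the tag's base
+ that overlap the access. */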
- The rationale for this: Certain optimizations wish to examine the difference
- between new_ops and old_ops after processing. If a set of operands don't
- change, new_ops will simply assume the pointer in old_ops, and the old_ops
- pointer will be set to NULL, indicating no memory needs to be cleared.
- Usage might appear something like:
- old_ops_copy = old_ops = stmt_ann(stmt)->operands;
- build_ssa_operands (stmt, NULL, &old_ops, &new_ops);
- <* compare old_ops_copy and new_ops *>
- free_ssa_operands (old_ops); */
+/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
+ clobbered variables in the function. */
static void
-build_ssa_operands (tree stmt)
+add_call_clobber_ops (tree stmt, tree callee)
{
- stmt_ann_t ann = get_stmt_ann (stmt);
+ unsigned u;
+ bitmap_iterator bi;
+ stmt_ann_t s_ann = stmt_ann (stmt);
+ bitmap not_read_b, not_written_b;
- /* Initially assume that the statement has no volatile operands, nor
- makes aliased loads or stores. */
- if (ann)
+ /* Functions that are not const, pure, or noreturn may clobber
+ call-clobbered variables. */
+ if (s_ann)
+ s_ann->makes_clobbering_call = true;
+
+ /* If we created .GLOBAL_VAR earlier, just use it. */
+ if (gimple_global_var (cfun))
{
- ann->has_volatile_ops = false;
- ann->makes_aliased_stores = false;
- ann->makes_aliased_loads = false;
+ tree var = gimple_global_var (cfun);
+ add_virtual_operand (var, s_ann, opf_def, NULL, 0, -1, true);
+ return;
}
- start_ssa_stmt_operands ();
-
- parse_ssa_operands (stmt);
+ /* Get info for local and module level statics. There is a bit
+ set for each static if the call being processed does not read
+ or write that variable. */
+ not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
+ not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
- finalize_ssa_stmt_operands (stmt);
-}
+ /* Add a VDEF operand for every call clobbered variable. */
+ EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
+ {
+ tree var = referenced_var_lookup (u);
+ unsigned int escape_mask = var_ann (var)->escape_mask;
+ tree real_var = var;
+ bool not_read;
+ bool not_written;
+
+ /* Not read and not written are computed on regular vars, not
+ subvars, so look at the parent var if this is an SFT. */
+ if (TREE_CODE (var) == STRUCT_FIELD_TAG)
+ real_var = SFT_PARENT_VAR (var);
+
+ not_read = not_read_b
+ ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
+ : false;
+
+ not_written = not_written_b
+ ? bitmap_bit_p (not_written_b, DECL_UID (real_var))
+ : false;
+ gcc_assert (!unmodifiable_var_p (var));
+
+ clobber_stats.clobbered_vars++;
+ /* See if this variable is really clobbered by this function. */
-/* Free any operands vectors in OPS. */
-void
-free_ssa_operands (stmt_operands_p ops)
-{
- ops->def_ops = NULL;
- ops->use_ops = NULL;
- ops->maydef_ops = NULL;
- ops->mustdef_ops = NULL;
- ops->vuse_ops = NULL;
+ /* Trivial case: things that escape only to pure/const calls are
+ never clobbered by a non-pure/const call, and are only read by
+ pure/const calls. */
+ if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
+ {
+ tree call = get_call_expr_in (stmt);
+ if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
+ {
+ add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
+ clobber_stats.unescapable_clobbers_avoided++;
+ continue;
+ }
+ else
+ {
+ clobber_stats.unescapable_clobbers_avoided++;
+ continue;
+ }
+ }
+
+ if (not_written)
+ {
+ clobber_stats.static_write_clobbers_avoided++;
+ if (!not_read)
+ add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
+ else
+ clobber_stats.static_read_clobbers_avoided++;
+ }
+ else
+ add_virtual_operand (var, s_ann, opf_def, NULL, 0, -1, true);
+ }
}
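+
+/* For instance (hypothetical), given a call-clobbered global 'int g'
+ and a call 'foo ()', the loop above adds a VDEF of 'g' unless IPA
+ reference analysis recorded 'g' in foo's not_written set, in which
+ case the VDEF is weakened to a VUSE, or dropped entirely if 'g' is
+ in the not_read set as well. */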
-/* Get the operands of statement STMT. Note that repeated calls to
- get_stmt_operands for the same statement will do nothing until the
- statement is marked modified by a call to mark_stmt_modified(). */
+/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
+ function. */
-void
-update_stmt_operands (tree stmt)
+static void
+add_call_read_ops (tree stmt, tree callee)
{
- stmt_ann_t ann = get_stmt_ann (stmt);
- /* If get_stmt_operands is called before SSA is initialized, dont
- do anything. */
- if (!ssa_operands_active ())
- return;
- /* The optimizers cannot handle statements that are nothing but a
- _DECL. This indicates a bug in the gimplifier. */
- gcc_assert (!SSA_VAR_P (stmt));
+ unsigned u;
+ bitmap_iterator bi;
+ stmt_ann_t s_ann = stmt_ann (stmt);
+ bitmap not_read_b;
- gcc_assert (ann->modified);
+ /* If the function is not pure, it may reference memory. Add
+ a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
+ for the heuristic used to decide whether to create .GLOBAL_VAR. */
+ if (gimple_global_var (cfun))
+ {
+ tree var = gimple_global_var (cfun);
+ add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
+ return;
+ }
+
+ not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
- timevar_push (TV_TREE_OPS);
+ /* Add a VUSE for each call-clobbered variable. */
+ EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
+ {
+ tree var = referenced_var (u);
+ tree real_var = var;
+ bool not_read;
+
+ clobber_stats.readonly_clobbers++;
- build_ssa_operands (stmt);
+ /* Not read and not written are computed on regular vars, not
+ subvars, so look at the parent var if this is an SFT. */
- /* Clear the modified bit for STMT. Subsequent calls to
- get_stmt_operands for this statement will do nothing until the
- statement is marked modified by a call to mark_stmt_modified(). */
- ann->modified = 0;
+ if (TREE_CODE (var) == STRUCT_FIELD_TAG)
+ real_var = SFT_PARENT_VAR (var);
- timevar_pop (TV_TREE_OPS);
+ not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
+ : false;
+
+ if (not_read)
+ {
+ clobber_stats.static_readonly_clobbers_avoided++;
+ continue;
+ }
+
+ add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
+ }
}
-
-/* Copies virtual operands from SRC to DST. */
-
-void
-copy_virtual_operands (tree dest, tree src)
-{
- tree t;
- ssa_op_iter iter, old_iter;
- use_operand_p use_p, u2;
- def_operand_p def_p, d2;
-
- build_ssa_operands (dest);
-
- /* Copy all the virtual fields. */
- FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
- append_vuse (t);
- FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
- append_v_may_def (t);
- FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
- append_v_must_def (t);
-
- if (opbuild_num_elems (&build_vuses) == 0
- && opbuild_num_elems (&build_v_may_defs) == 0
- && opbuild_num_elems (&build_v_must_defs) == 0)
- return;
- /* Now commit the virtual operands to this stmt. */
- finalize_ssa_v_must_defs (dest);
- finalize_ssa_v_may_defs (dest);
- finalize_ssa_vuses (dest);
+/* A subroutine of get_expr_operands to handle CALL_EXPR. */
- /* Finally, set the field to the same values as then originals. */
+static void
+get_call_expr_operands (tree stmt, tree expr)
+{
+ int call_flags = call_expr_flags (expr);
+ int i, nargs;
+ stmt_ann_t ann = stmt_ann (stmt);
-
- t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
- FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
- {
- gcc_assert (!op_iter_done (&old_iter));
- SET_USE (use_p, t);
- t = op_iter_next_tree (&old_iter);
- }
- gcc_assert (op_iter_done (&old_iter));
+ ann->references_memory = true;
- op_iter_init_maydef (&old_iter, src, &u2, &d2);
- FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
+ /* If aliases have been computed already, add VDEF or VUSE
+ operands for all the symbols that have been found to be
+ call-clobbered. */
+ if (gimple_aliases_computed_p (cfun)
+ && !(call_flags & ECF_NOVOPS))
{
- gcc_assert (!op_iter_done (&old_iter));
- SET_USE (use_p, USE_FROM_PTR (u2));
- SET_DEF (def_p, DEF_FROM_PTR (d2));
- op_iter_next_maymustdef (&u2, &d2, &old_iter);
+ /* A 'pure' or a 'const' function never call-clobbers anything.
+ A 'noreturn' function might, but since we don't return anyway
+ there is no point in recording that. */
+ if (TREE_SIDE_EFFECTS (expr)
+ && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
+ add_call_clobber_ops (stmt, get_callee_fndecl (expr));
+ else if (!(call_flags & ECF_CONST))
+ add_call_read_ops (stmt, get_callee_fndecl (expr));
}
- gcc_assert (op_iter_done (&old_iter));
- op_iter_init_mustdef (&old_iter, src, &u2, &d2);
- FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
- {
- gcc_assert (!op_iter_done (&old_iter));
- SET_USE (use_p, USE_FROM_PTR (u2));
- SET_DEF (def_p, DEF_FROM_PTR (d2));
- op_iter_next_maymustdef (&u2, &d2, &old_iter);
- }
- gcc_assert (op_iter_done (&old_iter));
+ /* Find real uses in the called function expression, the arguments
+ and the static chain. */
+ get_expr_operands (stmt, &CALL_EXPR_FN (expr), opf_use);
+ nargs = call_expr_nargs (expr);
+ for (i = 0; i < nargs; i++)
+ get_expr_operands (stmt, &CALL_EXPR_ARG (expr, i), opf_use);
+ get_expr_operands (stmt, &CALL_EXPR_STATIC_CHAIN (expr), opf_use);
}
-/* Specifically for use in DOM's expression analysis. Given a store, we
- create an artificial stmt which looks like a load from the store, this can
- be used to eliminate redundant loads. OLD_OPS are the operands from the
- store stmt, and NEW_STMT is the new load which represents a load of the
- values stored. */
+/* Scan operands in ASM_EXPR STMT. */
-void
-create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
+static void
+get_asm_expr_operands (tree stmt)
{
- stmt_ann_t ann;
- tree op;
- ssa_op_iter iter;
- use_operand_p use_p;
- unsigned x;
-
- ann = get_stmt_ann (new_stmt);
+ stmt_ann_t s_ann;
+ int i, noutputs;
+ const char **oconstraints;
+ const char *constraint;
+ bool allows_mem, allows_reg, is_inout;
+ tree link;
- /* process the stmt looking for operands. */
- start_ssa_stmt_operands ();
- parse_ssa_operands (new_stmt);
+ s_ann = stmt_ann (stmt);
+ noutputs = list_length (ASM_OUTPUTS (stmt));
+ oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
- for (x = 0; x < opbuild_num_elems (&build_vuses); x++)
+ /* Gather all output operands. */
+ for (i = 0, link = ASM_OUTPUTS (stmt); link; i++, link = TREE_CHAIN (link))
{
- tree t = opbuild_elem_virtual (&build_vuses, x);
- if (TREE_CODE (t) != SSA_NAME)
+ constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
+ oconstraints[i] = constraint;
+ parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
+ &allows_reg, &is_inout);
+
+ /* This should have been split in gimplify_asm_expr. */
+ gcc_assert (!allows_reg || !is_inout);
+
+ /* Memory operands are addressable. Note that STMT needs the
+ address of this operand. */
+ if (!allows_reg && allows_mem)
{
- var_ann_t ann = var_ann (t);
- ann->in_vuse_list = 0;
+ tree t = get_base_address (TREE_VALUE (link));
+ if (t && DECL_P (t) && s_ann)
+ add_to_addressable_set (t, &s_ann->addresses_taken);
}
+
+ get_expr_operands (stmt, &TREE_VALUE (link), opf_def);
}
-
- for (x = 0; x < opbuild_num_elems (&build_v_may_defs); x++)
+
+ /* Gather all input operands. */
+ for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
{
- tree t = opbuild_elem_virtual (&build_v_may_defs, x);
- if (TREE_CODE (t) != SSA_NAME)
+ constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
+ parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
+ &allows_mem, &allows_reg);
+
+ /* Memory operands are addressable. Note that STMT needs the
+ address of this operand. */
+ if (!allows_reg && allows_mem)
{
- var_ann_t ann = var_ann (t);
- ann->in_v_may_def_list = 0;
+ tree t = get_base_address (TREE_VALUE (link));
+ if (t && DECL_P (t) && s_ann)
+ add_to_addressable_set (t, &s_ann->addresses_taken);
}
- }
- /* Remove any virtual operands that were found. */
- opbuild_clear (&build_v_may_defs);
- opbuild_clear (&build_v_must_defs);
- opbuild_clear (&build_vuses);
- /* For each VDEF on the original statement, we want to create a
- VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
- statement. */
- FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
- (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
- append_vuse (op);
-
- /* Now build the operands for this new stmt. */
- finalize_ssa_stmt_operands (new_stmt);
+ get_expr_operands (stmt, &TREE_VALUE (link), 0);
+ }
- /* All uses in this fake stmt must not be in the immediate use lists. */
- FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
- delink_imm_use (use_p);
-}
+ /* Clobber all memory and addressable symbols for asm ("" : : : "memory"). */
+ for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
+ if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
+ {
+ unsigned i;
+ bitmap_iterator bi;
-static void
-swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
-{
- tree op0, op1;
- op0 = *exp0;
- op1 = *exp1;
+ s_ann->references_memory = true;
- /* If the operand cache is active, attempt to preserve the relative positions
- of these two operands in their respective immediate use lists. */
- if (ssa_operands_active () && op0 != op1)
- {
- use_optype_p use0, use1, ptr;
- use0 = use1 = NULL;
- /* Find the 2 operands in the cache, if they are there. */
- for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
- if (USE_OP_PTR (ptr)->use == exp0)
+ EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
{
- use0 = ptr;
- break;
+ tree var = referenced_var (i);
+ add_stmt_operand (&var, s_ann, opf_def | opf_implicit);
}
- for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
- if (USE_OP_PTR (ptr)->use == exp1)
+
+ EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, i, bi)
{
- use1 = ptr;
- break;
+ tree var = referenced_var (i);
+
+ /* Subvars are explicitly represented in this list, so we
+ don't need the original to be added to the clobber ops,
+ but the original *will* be in this list because we keep
+ the addressability of the original variable up-to-date
+ to avoid confusing the back-end. */
+ if (var_can_have_subvars (var)
+ && get_subvars_for_var (var) != NULL)
+ continue;
+
+ add_stmt_operand (&var, s_ann, opf_def | opf_implicit);
}
- /* If both uses don't have operand entries, there isn't much we can do
- at this point. Presumably we dont need to worry about it. */
- if (use0 && use1)
- {
- tree *tmp = USE_OP_PTR (use1)->use;
- USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
- USE_OP_PTR (use0)->use = tmp;
- }
- }
+ break;
+ }
+}
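+
+/* For instance (hypothetical), 'asm volatile ("" : : : "memory")'
+ matches the clobber loop above, so it receives implicit VDEFs for
+ every call-clobbered and every addressable variable in the
+ function. */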
- /* Now swap the data. */
- *exp0 = op1;
- *exp1 = op0;
+
+/* Scan operands for the assignment expression EXPR in statement STMT. */
+
+static void
+get_modify_stmt_operands (tree stmt, tree expr)
+{
+ /* First get operands from the RHS. */
+ get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 1), opf_use);
+
+ /* For the LHS, use a regular definition (opf_def) for GIMPLE
+ registers. If the LHS is a store to memory, we will need
+ a preserving definition (VDEF).
+
+ Preserving definitions are those that modify a part of an
+ aggregate object for which no subvars have been computed (or the
+ reference does not correspond exactly to one of them). Stores
+ through a pointer are also represented with VDEF operators.
+
+ We used to distinguish between preserving and killing definitions.
+ We always emit preserving definitions now. */
+ get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 0), opf_def);
}
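+
+/* For instance (hypothetical), 'x_1 = y_2' between GIMPLE registers
+ produces only a real DEF of x_1 and a real USE of y_2, whereas
+ 'a.f = y_2' for a non-register 'a' produces a real USE of y_2 plus
+ a preserving VDEF of 'a' (or of its overlapping subvariables). */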
-/* Recursively scan the expression pointed to by EXPR_P in statement referred
- to by INFO. FLAGS is one of the OPF_* constants modifying how to interpret
- the operands found. */
+/* Recursively scan the expression pointed to by EXPR_P in statement
+ STMT. FLAGS is one of the OPF_* constants modifying how to
+ interpret the operands found. */
static void
get_expr_operands (tree stmt, tree *expr_p, int flags)
switch (code)
{
case ADDR_EXPR:
- /* We could have the address of a component, array member,
- etc which has interesting variable references. */
/* Taking the address of a variable does not represent a
- reference to it, but the fact that the stmt takes its address will be
- of interest to some passes (e.g. alias resolution). */
- add_stmt_operand (expr_p, s_ann, 0);
+ reference to it, but the fact that the statement takes its
+ address will be of interest to some passes (e.g. alias
+ resolution). */
+ add_to_addressable_set (TREE_OPERAND (expr, 0), &s_ann->addresses_taken);
- /* If the address is invariant, there may be no interesting variable
- references inside. */
+ /* If the address is invariant, there may be no interesting
+ variable references inside. */
if (is_gimple_min_invariant (expr))
return;
- /* There should be no VUSEs created, since the referenced objects are
- not really accessed. The only operands that we should find here
- are ARRAY_REF indices which will always be real operands (GIMPLE
- does not allow non-registers as array indices). */
+ /* Otherwise, there may be variables referenced inside but there
+ should be no VUSEs created, since the referenced objects are
+ not really accessed. The only operands that we should find
+ here are ARRAY_REF indices which will always be real operands
+ (GIMPLE does not allow non-registers as array indices). */
flags |= opf_no_vops;
-
get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
return;
case SSA_NAME:
+ case STRUCT_FIELD_TAG:
+ case SYMBOL_MEMORY_TAG:
+ case NAME_MEMORY_TAG:
+ add_stmt_operand (expr_p, s_ann, flags);
+ return;
+
case VAR_DECL:
case PARM_DECL:
case RESULT_DECL:
- case CONST_DECL:
{
subvar_t svars;
- /* Add the subvars for a variable if it has subvars, to DEFS or USES.
- Otherwise, add the variable itself.
- Whether it goes to USES or DEFS depends on the operand flags. */
+ /* Add the subvars for a variable, if it has subvars, to DEFS
+ or USES. Otherwise, add the variable itself. Whether it
+ goes to USES or DEFS depends on the operand flags. */
if (var_can_have_subvars (expr)
&& (svars = get_subvars_for_var (expr)))
{
add_stmt_operand (&sv->var, s_ann, flags);
}
else
- {
- add_stmt_operand (expr_p, s_ann, flags);
- }
+ add_stmt_operand (expr_p, s_ann, flags);
+
return;
}
+
case MISALIGNED_INDIRECT_REF:
get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
/* fall through */
case ALIGN_INDIRECT_REF:
case INDIRECT_REF:
- get_indirect_ref_operands (stmt, expr, flags);
+ get_indirect_ref_operands (stmt, expr, flags, NULL_TREE, 0, -1, true);
return;
case TARGET_MEM_REF:
case ARRAY_REF:
case ARRAY_RANGE_REF:
- /* Treat array references as references to the virtual variable
- representing the array. The virtual variable for an ARRAY_REF
- is the VAR_DECL for the array. */
-
- /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
- according to the value of IS_DEF. Recurse if the LHS of the
- ARRAY_REF node is not a regular variable. */
- if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
- add_stmt_operand (expr_p, s_ann, flags);
- else
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
-
- get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
- return;
-
case COMPONENT_REF:
case REALPART_EXPR:
case IMAGPART_EXPR:
{
tree ref;
- unsigned HOST_WIDE_INT offset, size;
- /* This component ref becomes an access to all of the subvariables
- it can touch, if we can determine that, but *NOT* the real one.
- If we can't determine which fields we could touch, the recursion
- will eventually get to a variable and add *all* of its subvars, or
- whatever is the minimum correct subset. */
-
- ref = okay_component_ref_for_subvars (expr, &offset, &size);
- if (ref)
- {
- subvar_t svars = get_subvars_for_var (ref);
+ HOST_WIDE_INT offset, size, maxsize;
+ bool none = true;
+
+ /* This component reference becomes an access to all of the
+ subvariables it can touch, if we can determine that, but
+ *NOT* the real one. If we can't determine which fields we
+ could touch, the recursion will eventually get to a
+ variable and add *all* of its subvars, or whatever is the
+ minimum correct subset. */
+ ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
+ if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
+ {
subvar_t sv;
+ subvar_t svars = get_subvars_for_var (ref);
+
for (sv = svars; sv; sv = sv->next)
{
bool exact;
- if (overlap_subvar (offset, size, sv, &exact))
+
+ if (overlap_subvar (offset, maxsize, sv->var, &exact))
{
int subvar_flags = flags;
- if (!exact)
- subvar_flags &= ~opf_kill_def;
+ none = false;
add_stmt_operand (&sv->var, s_ann, subvar_flags);
}
}
+
+ if (!none)
+ flags |= opf_no_vops;
}
- else
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
- flags & ~opf_kill_def);
+ else if (TREE_CODE (ref) == INDIRECT_REF)
+ {
+ get_indirect_ref_operands (stmt, ref, flags, expr, offset,
+ maxsize, false);
+ flags |= opf_no_vops;
+ }
+
+ /* Even if we found subvars above, we still need to walk the base
+ expression so that we see immediate uses such as 'd' in s.a[d];
+ if s.a has subvars we would otherwise miss those uses. */
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
if (code == COMPONENT_REF)
{
if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
s_ann->has_volatile_ops = true;
- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
+ }
+ else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
+ {
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_use);
}
+
return;
}
+
case WITH_SIZE_EXPR:
/* WITH_SIZE_EXPR is a pass-through reference to its first argument,
and an rvalue reference to its second argument. */
- get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
return;
case COND_EXPR:
case VEC_COND_EXPR:
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
return;
- case MODIFY_EXPR:
- {
- int subflags;
- tree op;
-
- get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
-
- op = TREE_OPERAND (expr, 0);
- if (TREE_CODE (op) == WITH_SIZE_EXPR)
- op = TREE_OPERAND (expr, 0);
- if (TREE_CODE (op) == ARRAY_REF
- || TREE_CODE (op) == ARRAY_RANGE_REF
- || TREE_CODE (op) == REALPART_EXPR
- || TREE_CODE (op) == IMAGPART_EXPR)
- subflags = opf_is_def;
- else
- subflags = opf_is_def | opf_kill_def;
-
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags);
- return;
- }
+ case GIMPLE_MODIFY_STMT:
+ get_modify_stmt_operands (stmt, expr);
+ return;
case CONSTRUCTOR:
{
for (idx = 0;
VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
idx++)
- get_expr_operands (stmt, &ce->value, opf_none);
+ get_expr_operands (stmt, &ce->value, opf_use);
return;
}
- case TRUTH_NOT_EXPR:
case BIT_FIELD_REF:
+ case TRUTH_NOT_EXPR:
case VIEW_CONVERT_EXPR:
do_unary:
get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
case ASSERT_EXPR:
do_binary:
{
- tree op0 = TREE_OPERAND (expr, 0);
- tree op1 = TREE_OPERAND (expr, 1);
-
- /* If it would be profitable to swap the operands, then do so to
- canonicalize the statement, enabling better optimization.
-
- By placing canonicalization of such expressions here we
- transparently keep statements in canonical form, even
- when the statement is modified. */
- if (tree_swap_operands_p (op0, op1, false))
- {
- /* For relationals we need to swap the operands
- and change the code. */
- if (code == LT_EXPR
- || code == GT_EXPR
- || code == LE_EXPR
- || code == GE_EXPR)
- {
- TREE_SET_CODE (expr, swap_tree_comparison (code));
- swap_tree_operands (stmt,
- &TREE_OPERAND (expr, 0),
- &TREE_OPERAND (expr, 1));
- }
-
- /* For a commutative operator we can just swap the operands. */
- else if (commutative_tree_code (code))
- {
- swap_tree_operands (stmt,
- &TREE_OPERAND (expr, 0),
- &TREE_OPERAND (expr, 1));
- }
- }
-
get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
return;
}
+ case DOT_PROD_EXPR:
case REALIGN_LOAD_EXPR:
{
get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
case EXC_PTR_EXPR:
case FILTER_EXPR:
case LABEL_DECL:
+ case CONST_DECL:
+ case OMP_PARALLEL:
+ case OMP_SECTIONS:
+ case OMP_FOR:
+ case OMP_SINGLE:
+ case OMP_MASTER:
+ case OMP_ORDERED:
+ case OMP_CRITICAL:
+ case OMP_RETURN:
+ case OMP_CONTINUE:
/* Expressions that make no memory references. */
return;
fprintf (stderr, "unhandled expression in get_expr_operands():\n");
debug_tree (expr);
fputs ("\n", stderr);
- internal_error ("internal error");
#endif
gcc_unreachable ();
}
-/* Scan operands in the ASM_EXPR stmt referred to in INFO. */
+/* Parse STMT looking for operands. When finished, the various
+ build_* operand vectors will have potential operands in them. */
static void
-get_asm_expr_operands (tree stmt)
+parse_ssa_operands (tree stmt)
{
- stmt_ann_t s_ann = stmt_ann (stmt);
- int noutputs = list_length (ASM_OUTPUTS (stmt));
- const char **oconstraints
- = (const char **) alloca ((noutputs) * sizeof (const char *));
- int i;
- tree link;
- const char *constraint;
- bool allows_mem, allows_reg, is_inout;
+ enum tree_code code;
- for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
+ code = TREE_CODE (stmt);
+ switch (code)
{
- oconstraints[i] = constraint
- = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
- parse_output_constraint (&constraint, i, 0, 0,
- &allows_mem, &allows_reg, &is_inout);
+ case GIMPLE_MODIFY_STMT:
+ get_modify_stmt_operands (stmt, stmt);
+ break;
- /* This should have been split in gimplify_asm_expr. */
- gcc_assert (!allows_reg || !is_inout);
+ case COND_EXPR:
+ get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_use);
+ break;
- /* Memory operands are addressable. Note that STMT needs the
- address of this operand. */
- if (!allows_reg && allows_mem)
- {
- tree t = get_base_address (TREE_VALUE (link));
- if (t && DECL_P (t) && s_ann)
- add_to_addressable_set (t, &s_ann->addresses_taken);
- }
+ case SWITCH_EXPR:
+ get_expr_operands (stmt, &SWITCH_COND (stmt), opf_use);
+ break;
- get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
- }
+ case ASM_EXPR:
+ get_asm_expr_operands (stmt);
+ break;
- for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
- {
- constraint
- = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
- parse_input_constraint (&constraint, 0, 0, noutputs, 0,
- oconstraints, &allows_mem, &allows_reg);
+ case RETURN_EXPR:
+ get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_use);
+ break;
- /* Memory operands are addressable. Note that STMT needs the
- address of this operand. */
- if (!allows_reg && allows_mem)
- {
- tree t = get_base_address (TREE_VALUE (link));
- if (t && DECL_P (t) && s_ann)
- add_to_addressable_set (t, &s_ann->addresses_taken);
- }
+ case GOTO_EXPR:
+ get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_use);
+ break;
- get_expr_operands (stmt, &TREE_VALUE (link), 0);
+ case LABEL_EXPR:
+ get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_use);
+ break;
+
+ case BIND_EXPR:
+ case CASE_LABEL_EXPR:
+ case TRY_CATCH_EXPR:
+ case TRY_FINALLY_EXPR:
+ case EH_FILTER_EXPR:
+ case CATCH_EXPR:
+ case RESX_EXPR:
+ /* These nodes contain no variable references. */
+ break;
+
+ default:
+ /* Notice that if get_expr_operands tries to use &STMT as the
+ operand pointer (which may only happen for USE operands), we
+ will fail in add_stmt_operand. This default will handle
+ statements like empty statements, or CALL_EXPRs that may
+ appear on the RHS of a statement or as statements themselves. */
+ get_expr_operands (stmt, &stmt, opf_use);
+ break;
}
+}
- /* Clobber memory for asm ("" : : : "memory"); */
- for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
- if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
- {
- unsigned i;
- bitmap_iterator bi;
+/* Create an operands cache for STMT. */
- /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
- decided to group them). */
- if (global_var)
- add_stmt_operand (&global_var, s_ann, opf_is_def);
- else
- EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
- {
- tree var = referenced_var (i);
- add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
- }
+static void
+build_ssa_operands (tree stmt)
+{
+ stmt_ann_t ann = get_stmt_ann (stmt);
+
+ /* Initially assume that the statement has no volatile operands and
+ makes no memory references. */
+ ann->has_volatile_ops = false;
+ ann->references_memory = false;
+ /* Just clear the bitmap so we don't end up reallocating it over and over. */
+ if (ann->addresses_taken)
+ bitmap_clear (ann->addresses_taken);
- /* Now clobber all addressables. */
- EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
- {
- tree var = referenced_var (i);
-
- /* Subvars are explicitly represented in this list, so
- we don't need the original to be added to the clobber
- ops, but the original *will* be in this list because
- we keep the addressability of the original
- variable up-to-date so we don't screw up the rest of
- the backend. */
- if (var_can_have_subvars (var)
- && get_subvars_for_var (var) != NULL)
- continue;
-
- add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
- }
+ start_ssa_stmt_operands ();
+ parse_ssa_operands (stmt);
+ operand_build_sort_virtual (build_vuses);
+ operand_build_sort_virtual (build_vdefs);
+ finalize_ssa_stmt_operands (stmt);
- break;
- }
+ if (ann->addresses_taken && bitmap_empty_p (ann->addresses_taken))
+ ann->addresses_taken = NULL;
+ /* For added safety, assume that statements with volatile operands
+ also reference memory. */
+ if (ann->has_volatile_ops)
+ ann->references_memory = true;
}
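+
+/* For instance (hypothetical), rebuilding the operands of 'a = *p_1'
+ for a non-register 'a' proceeds as follows: parse_ssa_operands
+ fills the build_* vectors with a real USE of p_1, virtual uses
+ through p's memory tag and a VDEF of 'a'; the virtual vectors are
+ then sorted and committed by finalize_ssa_stmt_operands. */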
-/* A subroutine of get_expr_operands to handle INDIRECT_REF,
- ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF. */
-
-static void
-get_indirect_ref_operands (tree stmt, tree expr, int flags)
-{
- tree *pptr = &TREE_OPERAND (expr, 0);
- tree ptr = *pptr;
- stmt_ann_t s_ann = stmt_ann (stmt);
- /* Stores into INDIRECT_REF operands are never killing definitions. */
- flags &= ~opf_kill_def;
+/* Free any operands vectors in OPS. */
- if (SSA_VAR_P (ptr))
- {
- struct ptr_info_def *pi = NULL;
+void
+free_ssa_operands (stmt_operands_p ops)
+{
+ ops->def_ops = NULL;
+ ops->use_ops = NULL;
+ ops->vdef_ops = NULL;
+ ops->vuse_ops = NULL;
+ BITMAP_FREE (ops->loads);
+ BITMAP_FREE (ops->stores);
+}
- /* If PTR has flow-sensitive points-to information, use it. */
- if (TREE_CODE (ptr) == SSA_NAME
- && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
- && pi->name_mem_tag)
- {
- /* PTR has its own memory tag. Use it. */
- add_stmt_operand (&pi->name_mem_tag, s_ann, flags);
- }
- else
- {
- /* If PTR is not an SSA_NAME or it doesn't have a name
- tag, use its type memory tag. */
- var_ann_t v_ann;
- /* If we are emitting debugging dumps, display a warning if
- PTR is an SSA_NAME with no flow-sensitive alias
- information. That means that we may need to compute
- aliasing again. */
- if (dump_file
- && TREE_CODE (ptr) == SSA_NAME
- && pi == NULL)
- {
- fprintf (dump_file,
- "NOTE: no flow-sensitive alias info for ");
- print_generic_expr (dump_file, ptr, dump_flags);
- fprintf (dump_file, " in ");
- print_generic_stmt (dump_file, stmt, dump_flags);
- }
+/* Get the operands of statement STMT. */
- if (TREE_CODE (ptr) == SSA_NAME)
- ptr = SSA_NAME_VAR (ptr);
- v_ann = var_ann (ptr);
- if (v_ann->type_mem_tag)
- add_stmt_operand (&v_ann->type_mem_tag, s_ann, flags);
- }
- }
+void
+update_stmt_operands (tree stmt)
+{
+ stmt_ann_t ann = get_stmt_ann (stmt);
- /* If a constant is used as a pointer, we can't generate a real
- operand for it but we mark the statement volatile to prevent
- optimizations from messing things up. */
- else if (TREE_CODE (ptr) == INTEGER_CST)
- {
- if (s_ann)
- s_ann->has_volatile_ops = true;
- return;
- }
+ /* If update_stmt_operands is called before SSA is initialized, do
+ nothing. */
+ if (!ssa_operands_active ())
+ return;
- /* Everything else *should* have been folded elsewhere, but users
- are smarter than we in finding ways to write invalid code. We
- cannot just assert here. If we were absolutely certain that we
- do handle all valid cases, then we could just do nothing here.
- That seems optimistic, so attempt to do something logical... */
- else if ((TREE_CODE (ptr) == PLUS_EXPR || TREE_CODE (ptr) == MINUS_EXPR)
- && TREE_CODE (TREE_OPERAND (ptr, 0)) == ADDR_EXPR
- && TREE_CODE (TREE_OPERAND (ptr, 1)) == INTEGER_CST)
- {
- /* Make sure we know the object is addressable. */
- pptr = &TREE_OPERAND (ptr, 0);
- add_stmt_operand (pptr, s_ann, 0);
+ /* The optimizers cannot handle statements that are nothing but a
+ _DECL. This indicates a bug in the gimplifier. */
+ gcc_assert (!SSA_VAR_P (stmt));
- /* Mark the object itself with a VUSE. */
- pptr = &TREE_OPERAND (*pptr, 0);
- get_expr_operands (stmt, pptr, flags);
- return;
- }
+ timevar_push (TV_TREE_OPS);
- /* Ok, this isn't even is_gimple_min_invariant. Something's broke. */
- else
- gcc_unreachable ();
+ gcc_assert (ann->modified);
+ build_ssa_operands (stmt);
+ ann->modified = 0;
- /* Add a USE operand for the base pointer. */
- get_expr_operands (stmt, pptr, opf_none);
+ timevar_pop (TV_TREE_OPS);
}
-/* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
-static void
-get_tmr_operands (tree stmt, tree expr, int flags)
+/* Copy virtual operands from SRC to DEST. */
+
+void
+copy_virtual_operands (tree dest, tree src)
{
- tree tag = TMR_TAG (expr);
+ unsigned int i, n;
+ voptype_p src_vuses, dest_vuses;
+ voptype_p src_vdefs, dest_vdefs;
+ struct voptype_d vuse;
+ struct voptype_d vdef;
+ stmt_ann_t dest_ann;
- /* First record the real operands. */
- get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
- get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);
+ VDEF_OPS (dest) = NULL;
+ VUSE_OPS (dest) = NULL;
- /* MEM_REFs should never be killing. */
- flags &= ~opf_kill_def;
+ dest_ann = get_stmt_ann (dest);
+ BITMAP_FREE (dest_ann->operands.loads);
+ BITMAP_FREE (dest_ann->operands.stores);
- if (TMR_SYMBOL (expr))
+ if (LOADED_SYMS (src))
{
- stmt_ann_t ann = stmt_ann (stmt);
- add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
+ dest_ann->operands.loads = BITMAP_ALLOC (&operands_bitmap_obstack);
+ bitmap_copy (dest_ann->operands.loads, LOADED_SYMS (src));
}
- if (tag)
- add_stmt_operand (&tag, stmt_ann (stmt), flags);
- else
- /* Something weird, so ensure that we will be careful. */
- stmt_ann (stmt)->has_volatile_ops = true;
-}
-
-/* A subroutine of get_expr_operands to handle CALL_EXPR. */
-
-static void
-get_call_expr_operands (tree stmt, tree expr)
-{
- tree op;
- int call_flags = call_expr_flags (expr);
-
- /* If aliases have been computed already, add V_MAY_DEF or V_USE
- operands for all the symbols that have been found to be
- call-clobbered.
-
- Note that if aliases have not been computed, the global effects
- of calls will not be included in the SSA web. This is fine
- because no optimizer should run before aliases have been
- computed. By not bothering with virtual operands for CALL_EXPRs
- we avoid adding superfluous virtual operands, which can be a
- significant compile time sink (See PR 15855). */
- if (aliases_computed_p
- && !bitmap_empty_p (call_clobbered_vars)
- && !(call_flags & ECF_NOVOPS))
+ if (STORED_SYMS (src))
{
- /* A 'pure' or a 'const' function never call-clobbers anything.
- A 'noreturn' function might, but since we don't return anyway
- there is no point in recording that. */
- if (TREE_SIDE_EFFECTS (expr)
- && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
- add_call_clobber_ops (stmt, get_callee_fndecl (expr));
- else if (!(call_flags & ECF_CONST))
- add_call_read_ops (stmt);
+ dest_ann->operands.stores = BITMAP_ALLOC (&operands_bitmap_obstack);
+ bitmap_copy (dest_ann->operands.stores, STORED_SYMS (src));
}
- /* Find uses in the called function. */
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
-
- for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
- get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
+ /* Copy all the VUSE operators and corresponding operands. */
+ dest_vuses = &vuse;
+ for (src_vuses = VUSE_OPS (src); src_vuses; src_vuses = src_vuses->next)
+ {
+ n = VUSE_NUM (src_vuses);
+ dest_vuses = add_vuse_op (dest, NULL_TREE, n, dest_vuses);
+ for (i = 0; i < n; i++)
+ SET_USE (VUSE_OP_PTR (dest_vuses, i), VUSE_OP (src_vuses, i));
- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
+ if (VUSE_OPS (dest) == NULL)
+ VUSE_OPS (dest) = vuse.next;
+ }
+ /* Copy all the VDEF operators and corresponding operands. */
+ dest_vdefs = &vdef;
+ for (src_vdefs = VDEF_OPS (src); src_vdefs; src_vdefs = src_vdefs->next)
+ {
+ n = VUSE_NUM (src_vdefs);
+ dest_vdefs = add_vdef_op (dest, NULL_TREE, n, dest_vdefs);
+ VDEF_RESULT (dest_vdefs) = VDEF_RESULT (src_vdefs);
+ for (i = 0; i < n; i++)
+ SET_USE (VUSE_OP_PTR (dest_vdefs, i), VUSE_OP (src_vdefs, i));
+
+ if (VDEF_OPS (dest) == NULL)
+ VDEF_OPS (dest) = vdef.next;
+ }
}
-/* Add *VAR_P to the appropriate operand array for INFO. FLAGS is as in
- get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
- the statement's real operands, otherwise it is added to virtual
- operands. */
+/* Specifically for use in DOM's expression analysis. Given a store, we
+ create an artificial stmt which looks like a load from the store; this
+ can be used to eliminate redundant loads. OLD_STMT is the store stmt,
+ and NEW_STMT is the new load, which represents a load of the values
+ stored. */
-static void
-add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
+void
+create_ssa_artificial_load_stmt (tree new_stmt, tree old_stmt)
{
- bool is_real_op;
- tree var, sym;
- var_ann_t v_ann;
-
- var = *var_p;
- STRIP_NOPS (var);
-
- /* If the operand is an ADDR_EXPR, add its operand to the list of
- variables that have had their address taken in this statement. */
- if (TREE_CODE (var) == ADDR_EXPR && s_ann)
- {
- add_to_addressable_set (TREE_OPERAND (var, 0), &s_ann->addresses_taken);
- return;
- }
-
- /* If the original variable is not a scalar, it will be added to the list
- of virtual operands. In that case, use its base symbol as the virtual
- variable representing it. */
- is_real_op = is_gimple_reg (var);
- if (!is_real_op && !DECL_P (var))
- var = get_virtual_var (var);
-
- /* If VAR is not a variable that we care to optimize, do nothing. */
- if (var == NULL_TREE || !SSA_VAR_P (var))
- return;
+ tree op;
+ ssa_op_iter iter;
+ use_operand_p use_p;
+ unsigned i;
- sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
- v_ann = var_ann (sym);
+ /* Make sure NEW_STMT has a statement annotation. */
+ get_stmt_ann (new_stmt);
- /* Mark statements with volatile operands. Optimizers should back
- off from statements having volatile operands. */
- if (TREE_THIS_VOLATILE (sym) && s_ann)
- s_ann->has_volatile_ops = true;
+ /* Process NEW_STMT looking for operands. */
+ start_ssa_stmt_operands ();
+ parse_ssa_operands (new_stmt);
- /* If the variable cannot be modified and this is a V_MAY_DEF change
- it into a VUSE. This happens when read-only variables are marked
- call-clobbered and/or aliased to writable variables. So we only
- check that this only happens on non-specific stores.
+ for (i = 0; VEC_iterate (tree, build_vuses, i, op); i++)
+ if (TREE_CODE (op) != SSA_NAME)
+ var_ann (op)->in_vuse_list = false;
+
+ for (i = 0; VEC_iterate (tree, build_vdefs, i, op); i++)
+ if (TREE_CODE (op) != SSA_NAME)
+ var_ann (op)->in_vdef_list = false;
- Note that if this is a specific store, i.e. associated with a
- modify_expr, then we can't suppress the V_DEF, lest we run into
- validation problems.
+ /* Remove any virtual operands that were found. */
+ VEC_truncate (tree, build_vdefs, 0);
+ VEC_truncate (tree, build_vuses, 0);
- This can happen when programs cast away const, leaving us with a
- store to read-only memory. If the statement is actually executed
- at runtime, then the program is ill formed. If the statement is
- not executed then all is well. At the very least, we cannot ICE. */
- if ((flags & opf_non_specific) && unmodifiable_var_p (var))
- {
- gcc_assert (!is_real_op);
- flags &= ~(opf_is_def | opf_kill_def);
- }
+ /* For each VDEF on the original statement, we want to create a
+ VUSE of the VDEF result operand on the new statement. */
+ FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter, SSA_OP_VDEF)
+ append_vuse (op);
- if (is_real_op)
- {
- /* The variable is a GIMPLE register. Add it to real operands. */
- if (flags & opf_is_def)
- append_def (var_p);
- else
- append_use (var_p);
- }
- else
- {
- varray_type aliases;
+ finalize_ssa_stmt_operands (new_stmt);
- /* The variable is not a GIMPLE register. Add it (or its aliases) to
- virtual operands, unless the caller has specifically requested
- not to add virtual operands (used when adding operands inside an
- ADDR_EXPR expression). */
- if (flags & opf_no_vops)
- return;
+ /* All uses in this fake stmt must not be in the immediate use lists. */
+ FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
+ delink_imm_use (use_p);
+}
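+
+/* For instance (hypothetical), if OLD_STMT is the store 'a = x_3'
+ producing '# a_5 = VDEF <a_4>', the artificial load built above
+ receives a VUSE of the result a_5, allowing DOM to match later
+ loads of 'a' against the value just stored. */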
- aliases = v_ann->may_aliases;
- if (aliases == NULL)
- {
- /* The variable is not aliased or it is an alias tag. */
- if (flags & opf_is_def)
- {
- if (flags & opf_kill_def)
- {
- /* Only regular variables or struct fields may get a
- V_MUST_DEF operand. */
- gcc_assert (v_ann->mem_tag_kind == NOT_A_TAG
- || v_ann->mem_tag_kind == STRUCT_FIELD);
- /* V_MUST_DEF for non-aliased, non-GIMPLE register
- variable definitions. */
- append_v_must_def (var);
- }
- else
- {
- /* Add a V_MAY_DEF for call-clobbered variables and
- memory tags. */
- append_v_may_def (var);
- }
- }
- else
- {
- append_vuse (var);
- if (s_ann && v_ann->is_alias_tag)
- s_ann->makes_aliased_loads = 1;
- }
- }
- else
- {
- size_t i;
+/* Swap operands EXP0 and EXP1 in statement STMT. No attempt is made
+ to test the validity of the swap operation. */
- /* The variable is aliased. Add its aliases to the virtual
- operands. */
- gcc_assert (VARRAY_ACTIVE_SIZE (aliases) != 0);
+void
+swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
+{
+ tree op0, op1;
+ op0 = *exp0;
+ op1 = *exp1;
- if (flags & opf_is_def)
- {
- /* If the variable is also an alias tag, add a virtual
- operand for it, otherwise we will miss representing
- references to the members of the variable's alias set.
- This fixes the bug in gcc.c-torture/execute/20020503-1.c. */
- if (v_ann->is_alias_tag)
- append_v_may_def (var);
-
- for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
- append_v_may_def (VARRAY_TREE (aliases, i));
-
- if (s_ann)
- s_ann->makes_aliased_stores = 1;
- }
- else
- {
- /* Similarly, append a virtual uses for VAR itself, when
- it is an alias tag. */
- if (v_ann->is_alias_tag)
- append_vuse (var);
+ /* If the operand cache is active, attempt to preserve the relative
+ positions of these two operands in their respective immediate use
+ lists. */
+ if (ssa_operands_active () && op0 != op1)
+ {
+ use_optype_p use0, use1, ptr;
+ use0 = use1 = NULL;
- for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
- append_vuse (VARRAY_TREE (aliases, i));
+ /* Find the 2 operands in the cache, if they are there. */
+ for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
+ if (USE_OP_PTR (ptr)->use == exp0)
+ {
+ use0 = ptr;
+ break;
+ }
- if (s_ann)
- s_ann->makes_aliased_loads = 1;
- }
+ for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
+ if (USE_OP_PTR (ptr)->use == exp1)
+ {
+ use1 = ptr;
+ break;
+ }
+
+ /* If we did not find operand entries for both uses, there isn't much
+ we can do at this point. Presumably we don't need to worry about it. */
+ if (use0 && use1)
+ {
+ tree *tmp = USE_OP_PTR (use1)->use;
+ USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
+ USE_OP_PTR (use0)->use = tmp;
}
}
+
+ /* Now swap the data. */
+ *exp0 = op1;
+ *exp1 = op0;
}
-
+
/* Add the base address of REF to the set *ADDRESSES_TAKEN. If
*ADDRESSES_TAKEN is NULL, a new set is created. REF may be
a single variable whose address has been taken or any other valid
}
-/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
- clobbered variables in the function. */
-
-static void
-add_call_clobber_ops (tree stmt, tree callee)
-{
- int i;
- unsigned u;
- tree t;
- bitmap_iterator bi;
- stmt_ann_t s_ann = stmt_ann (stmt);
- struct stmt_ann_d empty_ann;
- bitmap not_read_b, not_written_b;
-
- /* Functions that are not const, pure or never return may clobber
- call-clobbered variables. */
- if (s_ann)
- s_ann->makes_clobbering_call = true;
-
- /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
- for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
- if (global_var)
- {
- add_stmt_operand (&global_var, s_ann, opf_is_def);
- return;
- }
-
- /* FIXME - if we have better information from the static vars
- analysis, we need to make the cache call site specific. This way
- we can have the performance benefits even if we are doing good
- optimization. */
-
- /* Get info for local and module level statics. There is a bit
- set for each static if the call being processed does not read
- or write that variable. */
-
- not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
- not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
-
- /* If cache is valid, copy the elements into the build vectors. */
- if (ssa_call_clobbered_cache_valid
- && (!not_read_b || bitmap_empty_p (not_read_b))
- && (!not_written_b || bitmap_empty_p (not_written_b)))
- {
- /* Process the caches in reverse order so we are always inserting at
- the head of the list. */
- for (i = VEC_length (tree, clobbered_vuses) - 1; i >=0; i--)
- {
- t = VEC_index (tree, clobbered_vuses, i);
- gcc_assert (TREE_CODE (t) != SSA_NAME);
- var_ann (t)->in_vuse_list = 1;
- opbuild_append_virtual (&build_vuses, t);
- }
- for (i = VEC_length (tree, clobbered_v_may_defs) - 1; i >= 0; i--)
- {
- t = VEC_index (tree, clobbered_v_may_defs, i);
- gcc_assert (TREE_CODE (t) != SSA_NAME);
- var_ann (t)->in_v_may_def_list = 1;
- opbuild_append_virtual (&build_v_may_defs, t);
- }
- if (s_ann)
- {
- s_ann->makes_aliased_loads = clobbered_aliased_loads;
- s_ann->makes_aliased_stores = clobbered_aliased_stores;
- }
- return;
- }
-
- memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
-
- /* Add a V_MAY_DEF operand for every call clobbered variable. */
- EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
- {
- tree var = referenced_var (u);
- if (unmodifiable_var_p (var))
- add_stmt_operand (&var, &empty_ann, opf_none);
- else
- {
- bool not_read
- = not_read_b ? bitmap_bit_p (not_read_b, u) : false;
- bool not_written
- = not_written_b ? bitmap_bit_p (not_written_b, u) : false;
-
- if ((TREE_READONLY (var)
- && (TREE_STATIC (var) || DECL_EXTERNAL (var)))
- || not_written)
- {
- if (!not_read)
- add_stmt_operand (&var, &empty_ann, opf_none);
- }
- else
- add_stmt_operand (&var, &empty_ann, opf_is_def);
- }
- }
-
- if ((!not_read_b || bitmap_empty_p (not_read_b))
- && (!not_written_b || bitmap_empty_p (not_written_b)))
- {
- clobbered_aliased_loads = empty_ann.makes_aliased_loads;
- clobbered_aliased_stores = empty_ann.makes_aliased_stores;
-
- /* Set the flags for a stmt's annotation. */
- if (s_ann)
- {
- s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
- s_ann->makes_aliased_stores = empty_ann.makes_aliased_stores;
- }
-
- /* Prepare empty cache vectors. */
- VEC_truncate (tree, clobbered_vuses, 0);
- VEC_truncate (tree, clobbered_v_may_defs, 0);
-
- /* Now fill the clobbered cache with the values that have been found. */
- for (i = opbuild_first (&build_vuses);
- i != OPBUILD_LAST;
- i = opbuild_next (&build_vuses, i))
- VEC_safe_push (tree, heap, clobbered_vuses,
- opbuild_elem_virtual (&build_vuses, i));
-
- gcc_assert (opbuild_num_elems (&build_vuses)
- == VEC_length (tree, clobbered_vuses));
-
- for (i = opbuild_first (&build_v_may_defs);
- i != OPBUILD_LAST;
- i = opbuild_next (&build_v_may_defs, i))
- VEC_safe_push (tree, heap, clobbered_v_may_defs,
- opbuild_elem_virtual (&build_v_may_defs, i));
-
- gcc_assert (opbuild_num_elems (&build_v_may_defs)
- == VEC_length (tree, clobbered_v_may_defs));
-
- ssa_call_clobbered_cache_valid = true;
- }
-}
-
-
-/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
- function. */
-
-static void
-add_call_read_ops (tree stmt)
-{
- int i;
- unsigned u;
- tree t;
- bitmap_iterator bi;
- stmt_ann_t s_ann = stmt_ann (stmt);
- struct stmt_ann_d empty_ann;
-
- /* if the function is not pure, it may reference memory. Add
- a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
- for the heuristic used to decide whether to create .GLOBAL_VAR. */
- if (global_var)
- {
- add_stmt_operand (&global_var, s_ann, opf_none);
- return;
- }
-
- /* If cache is valid, copy the elements into the build vector. */
- if (ssa_ro_call_cache_valid)
- {
- for (i = VEC_length (tree, ro_call_vuses) - 1; i >=0 ; i--)
- {
- /* Process the caches in reverse order so we are always inserting at
- the head of the list. */
- t = VEC_index (tree, ro_call_vuses, i);
- gcc_assert (TREE_CODE (t) != SSA_NAME);
- var_ann (t)->in_vuse_list = 1;
- opbuild_append_virtual (&build_vuses, t);
- }
- if (s_ann)
- s_ann->makes_aliased_loads = ro_call_aliased_loads;
- return;
- }
-
- memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
-
- /* Add a VUSE for each call-clobbered variable. */
- EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
- {
- tree var = referenced_var (u);
- add_stmt_operand (&var, &empty_ann, opf_none | opf_non_specific);
- }
-
- ro_call_aliased_loads = empty_ann.makes_aliased_loads;
- if (s_ann)
- s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
-
- /* Prepare empty cache vectors. */
- VEC_truncate (tree, ro_call_vuses, 0);
-
- /* Now fill the clobbered cache with the values that have been found. */
- for (i = opbuild_first (&build_vuses);
- i != OPBUILD_LAST;
- i = opbuild_next (&build_vuses, i))
- VEC_safe_push (tree, heap, ro_call_vuses,
- opbuild_elem_virtual (&build_vuses, i));
-
- gcc_assert (opbuild_num_elems (&build_vuses)
- == VEC_length (tree, ro_call_vuses));
-
- ssa_ro_call_cache_valid = true;
-}
-
-
/* Scan the immediate_use list for VAR, making sure it is linked properly.
- return RTUE iof there is a problem. */
+ Return TRUE if there is a problem and emit an error message to F. */
bool
verify_imm_links (FILE *f, tree var)
prev = ptr;
ptr = ptr->next;
- /* Avoid infinite loops. */
- if (count++ > 30000)
+
+ /* Avoid infinite loops. 50,000,000 uses probably indicates a
+ problem. */
+ if (count++ > 50000000)
goto error;
}
FOR_EACH_IMM_USE_FAST (use_p, iter, var)
{
- if (!is_gimple_reg (USE_FROM_PTR (use_p)))
- print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
+ if (use_p->stmt == NULL && use_p->use == NULL)
+ fprintf (file, "***end of stmt iterator marker***\n");
else
- print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
+ if (!is_gimple_reg (USE_FROM_PTR (use_p)))
+ print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS|TDF_MEMSYMS);
+ else
+ print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
}
fprintf (file, "\n");
}
+
/* Dump all the immediate uses to FILE. */
void
dump_immediate_uses (stderr);
}
+
/* Dump def-use edges on stderr. */
void
{
dump_immediate_uses_for (stderr, var);
}
-#include "gt-tree-ssa-operands.h"
+
+
+/* Create a new change buffer for the statement pointed to by STMT_P
+ and push the buffer onto SCB_STACK. Each change buffer records the
+ state information needed to determine what changed in the statement.
+ Mainly, this keeps track of symbols that may need to be put into SSA
+ form, SSA name replacements, and other information needed to keep
+ the SSA form up to date. */
+
+void
+push_stmt_changes (tree *stmt_p)
+{
+ tree stmt;
+ scb_t buf;
+
+ stmt = *stmt_p;
+
+ /* It makes no sense to keep track of PHI nodes. */
+ if (TREE_CODE (stmt) == PHI_NODE)
+ return;
+
+ buf = xmalloc (sizeof *buf);
+ memset (buf, 0, sizeof *buf);
+
+ buf->stmt_p = stmt_p;
+
+ if (stmt_references_memory_p (stmt))
+ {
+ tree op;
+ ssa_op_iter i;
+
+ FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VUSE)
+ {
+ tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
+ if (buf->loads == NULL)
+ buf->loads = BITMAP_ALLOC (NULL);
+ bitmap_set_bit (buf->loads, DECL_UID (sym));
+ }
+
+ FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VDEF)
+ {
+ tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
+ if (buf->stores == NULL)
+ buf->stores = BITMAP_ALLOC (NULL);
+ bitmap_set_bit (buf->stores, DECL_UID (sym));
+ }
+ }
+
+ VEC_safe_push (scb_t, heap, scb_stack, buf);
+}
+
+
+/* Given two sets S1 and S2, mark the symbols that differ in S1 and S2
+ for renaming. The set to mark for renaming is (S1 & ~S2) | (S2 & ~S1). */
+
+static void
+mark_difference_for_renaming (bitmap s1, bitmap s2)
+{
+ if (s1 == NULL && s2 == NULL)
+ return;
+
+ if (s1 && s2 == NULL)
+ mark_set_for_renaming (s1);
+ else if (s1 == NULL && s2)
+ mark_set_for_renaming (s2);
+ else if (!bitmap_equal_p (s1, s2))
+ {
+ bitmap t1 = BITMAP_ALLOC (NULL);
+ bitmap t2 = BITMAP_ALLOC (NULL);
+
+ bitmap_and_compl (t1, s1, s2);
+ bitmap_and_compl (t2, s2, s1);
+ bitmap_ior_into (t1, t2);
+ mark_set_for_renaming (t1);
+
+ BITMAP_FREE (t1);
+ BITMAP_FREE (t2);
+ }
+}
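+
+/* A worked example: if S1 = { a, b } describes the symbols loaded
+ before a statement was modified and S2 = { b, c } the symbols loaded
+ after it, then
+
+   t1 = S1 & ~S2 = { a }
+   t2 = S2 & ~S1 = { c }
+   t1 | t2       = { a, c }
+
+ so 'a' and 'c' are marked for renaming, while 'b', which appears in
+ both sets, is left alone. */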
+
+
+/* Pop the top SCB from SCB_STACK and act on the differences between
+ what was recorded by push_stmt_changes and the current state of
+ the statement. */
+
+void
+pop_stmt_changes (tree *stmt_p)
+{
+ tree op, stmt;
+ ssa_op_iter iter;
+ bitmap loads, stores;
+ scb_t buf;
+
+ stmt = *stmt_p;
+
+ /* It makes no sense to keep track of PHI nodes. */
+ if (TREE_CODE (stmt) == PHI_NODE)
+ return;
+
+ buf = VEC_pop (scb_t, scb_stack);
+ gcc_assert (stmt_p == buf->stmt_p);
+
+ /* Force an operand re-scan on the statement and mark any newly
+ exposed variables. */
+ update_stmt (stmt);
+
+ /* Determine whether any memory symbols need to be renamed. If the
+ sets of loads and stores are different after the statement is
+ modified, then the affected symbols need to be renamed.
+
+ Note that the statement may no longer reference memory at all,
+ but we still need to act on the differences in the sets of
+ symbols. */
+ loads = stores = NULL;
+ if (stmt_references_memory_p (stmt))
+ {
+ tree op;
+ ssa_op_iter i;
+
+ FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VUSE)
+ {
+ tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
+ if (loads == NULL)
+ loads = BITMAP_ALLOC (NULL);
+ bitmap_set_bit (loads, DECL_UID (sym));
+ }
+
+ FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VDEF)
+ {
+ tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
+ if (stores == NULL)
+ stores = BITMAP_ALLOC (NULL);
+ bitmap_set_bit (stores, DECL_UID (sym));
+ }
+ }
+
+ /* If LOADS is different from BUF->LOADS, the affected
+ symbols need to be marked for renaming. */
+ mark_difference_for_renaming (loads, buf->loads);
+
+ /* Similarly for STORES and BUF->STORES. */
+ mark_difference_for_renaming (stores, buf->stores);
+
+ /* Mark all the naked GIMPLE register operands for renaming. */
+ FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF|SSA_OP_USE)
+ if (DECL_P (op))
+ mark_sym_for_renaming (op);
+
+ /* FIXME, need to add more finalizers here. Cleanup EH info,
+ recompute invariants for address expressions, add
+ SSA replacement mappings, etc. For instance, given
+ testsuite/gcc.c-torture/compile/pr16808.c, we fold a statement of
+ the form:
+
+ # SMT.4_20 = VDEF <SMT.4_16>
+ D.1576_11 = 1.0e+0;
+
+ So, the VDEF will disappear, but instead of marking SMT.4 for
+ renaming it would be far more efficient to establish a
+ replacement mapping that would replace every reference of
+ SMT.4_20 with SMT.4_16. */
+
+ /* Free memory used by the buffer. */
+ BITMAP_FREE (buf->loads);
+ BITMAP_FREE (buf->stores);
+ BITMAP_FREE (loads);
+ BITMAP_FREE (stores);
+ buf->stmt_p = NULL;
+ free (buf);
+}
+
+
+/* Discard the topmost change buffer from SCB_STACK. This is useful
+ when the caller realizes that it did not actually modify the
+ statement, as it avoids the expensive operand re-scan. */
+
+void
+discard_stmt_changes (tree *stmt_p)
+{
+ scb_t buf;
+ tree stmt;
+
+ /* It makes no sense to keep track of PHI nodes. */
+ stmt = *stmt_p;
+ if (TREE_CODE (stmt) == PHI_NODE)
+ return;
+
+ buf = VEC_pop (scb_t, scb_stack);
+ gcc_assert (stmt_p == buf->stmt_p);
+
+ /* Free memory used by the buffer. */
+ BITMAP_FREE (buf->loads);
+ BITMAP_FREE (buf->stores);
+ buf->stmt_p = NULL;
+ free (buf);
+}
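+
+/* A minimal sketch of the intended protocol, where TRY_TO_FOLD stands
+ in for any hypothetical transformation a pass might apply:
+
+   push_stmt_changes (&stmt);
+   if (try_to_fold (stmt))
+     pop_stmt_changes (&stmt);      (re-scan operands, mark symbols)
+   else
+     discard_stmt_changes (&stmt);  (nothing changed, skip the re-scan)
+
+ Every push must be matched by exactly one pop or discard, because
+ the change buffers live on a single stack (SCB_STACK). */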
+
+
+/* Returns true if statement STMT may access memory. */
+
+bool
+stmt_references_memory_p (tree stmt)
+{
+ if (!gimple_ssa_operands (cfun)->ops_active || TREE_CODE (stmt) == PHI_NODE)
+ return false;
+
+ return stmt_ann (stmt)->references_memory;
+}