X-Git-Url: http://git.sourceforge.jp/view?a=blobdiff_plain;f=gcc%2Ftree-ssa-operands.c;h=85a0a08b3da67a8cefcd0b82fd286b5da9d202e1;hb=8d40ea4504ee85e1a6b73103edb1776141d5d22e;hp=dbbcf341d29684a40b7db306627b7c2901cadc3b;hpb=7920eed55f2a73bcbfd84f9bf5ed4429fcd8cbd2;p=pf3gnuchains%2Fgcc-fork.git diff --git a/gcc/tree-ssa-operands.c b/gcc/tree-ssa-operands.c index dbbcf341d29..85a0a08b3da 100644 --- a/gcc/tree-ssa-operands.c +++ b/gcc/tree-ssa-operands.c @@ -1,11 +1,12 @@ /* SSA operands management for trees. - Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc. + Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008 + Free Software Foundation, Inc. This file is part of GCC. GCC is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by -the Free Software Foundation; either version 2, or (at your option) +the Free Software Foundation; either version 3, or (at your option) any later version. GCC is distributed in the hope that it will be useful, @@ -14,9 +15,8 @@ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License -along with GCC; see the file COPYING. If not, write to -the Free Software Foundation, 51 Franklin Street, Fifth Floor, -Boston, MA 02110-1301, USA. */ +along with GCC; see the file COPYING3. If not see +. */ #include "config.h" #include "system.h" @@ -71,10 +71,14 @@ Boston, MA 02110-1301, USA. */ variable, and that same variable occurs in the same operands cache, then the new cache vector will also get the same SSA_NAME. - i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand - vector for VUSE, then the new vector will also be modified such that - it contains 'a_5' rather than 'a'. */ + i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new + operand vector for VUSE, then the new vector will also be modified + such that it contains 'a_5' rather than 'a'. */ +/* Helper functions from gimple.c. These are GIMPLE manipulation + routines that only the operand scanner should need. */ +void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *); +void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *); /* Structure storing statistics on how many call clobbers we have, and how many where avoided. */ @@ -123,7 +127,7 @@ static struct #define opf_no_vops (1 << 1) /* Operand is an implicit reference. This is used to distinguish - explicit assignments in the form of GIMPLE_MODIFY_STMT from + explicit assignments in the form of MODIFY_EXPR from clobbering sites like function calls or ASM_EXPRs. */ #define opf_implicit (1 << 2) @@ -139,13 +143,17 @@ static VEC(tree,heap) *build_vdefs; /* Set for building all the VUSE operands. */ static VEC(tree,heap) *build_vuses; +/* Bitmap obstack for our datastructures that needs to survive across + compilations of multiple functions. */ +static bitmap_obstack operands_bitmap_obstack; + /* Set for building all the loaded symbols. */ static bitmap build_loads; /* Set for building all the stored symbols. */ static bitmap build_stores; -static void get_expr_operands (tree, tree *, int); +static void get_expr_operands (gimple, tree *, int); /* Number of functions with initialized ssa_operands. */ static int n_initialized = 0; @@ -175,7 +183,7 @@ static int n_initialized = 0; struct scb_d { /* Pointer to the statement being modified. 
*/ - tree *stmt_p; + gimple *stmt_p; /* If the statement references memory these are the sets of symbols loaded and stored by the statement. */ @@ -196,7 +204,7 @@ static VEC(scb_t,heap) *scb_stack; /* Return the DECL_UID of the base variable of T. */ static inline unsigned -get_name_decl (tree t) +get_name_decl (const_tree t) { if (TREE_CODE (t) != SSA_NAME) return DECL_UID (t); @@ -207,15 +215,13 @@ get_name_decl (tree t) /* Comparison function for qsort used in operand_build_sort_virtual. */ -static int +int operand_build_cmp (const void *p, const void *q) { - tree e1 = *((const tree *)p); - tree e2 = *((const tree *)q); - unsigned int u1,u2; - - u1 = get_name_decl (e1); - u2 = get_name_decl (e2); + const_tree const e1 = *((const_tree const *)p); + const_tree const e2 = *((const_tree const *)q); + const unsigned int u1 = get_name_decl (e1); + const unsigned int u2 = get_name_decl (e2); /* We want to sort in ascending order. They can never be equal. */ #ifdef ENABLE_CHECKING @@ -255,12 +261,18 @@ operand_build_sort_virtual (VEC(tree,heap) *list) operand_build_cmp); } - /* Return true if the SSA operands cache is active. */ bool ssa_operands_active (void) { + /* This function may be invoked from contexts where CFUN is NULL + (IPA passes), return false for now. FIXME: operands may be + active in each individual function, maybe this function should + take CFUN as a parameter. */ + if (cfun == NULL) + return false; + return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active; } @@ -302,15 +314,17 @@ vop_free_bucket_size (int bucket) static inline int vop_free_bucket_index (int num) { - gcc_assert (num > 0); + gcc_assert (num > 0 && NUM_VOP_FREE_BUCKETS > 16); /* Sizes 1 through 16 use buckets 0-15. */ if (num <= 16) return num - 1; - /* Buckets 16 - 45 represent 17 through 256 in 8 unit chunks. */ - if (num < 256) - return 14 + (num - 1) / 8; - return -1; + /* Buckets 16 - NUM_VOP_FREE_BUCKETS represent 8 unit chunks. */ + num = 14 + (num - 1) / 8; + if (num >= NUM_VOP_FREE_BUCKETS) + return -1; + else + return num; } @@ -351,9 +365,6 @@ add_vop_to_freelist (voptype_p ptr) #define OP_SIZE_2 110 #define OP_SIZE_3 511 -/* Current size of the operand memory buffer. */ -static unsigned int ssa_operand_mem_size; - /* Initialize the operand cache routines. 
*/ void @@ -365,18 +376,20 @@ init_ssa_operands (void) build_uses = VEC_alloc (tree, heap, 10); build_vuses = VEC_alloc (tree, heap, 25); build_vdefs = VEC_alloc (tree, heap, 25); - build_loads = BITMAP_ALLOC (NULL); - build_stores = BITMAP_ALLOC (NULL); + bitmap_obstack_initialize (&operands_bitmap_obstack); + build_loads = BITMAP_ALLOC (&operands_bitmap_obstack); + build_stores = BITMAP_ALLOC (&operands_bitmap_obstack); scb_stack = VEC_alloc (scb_t, heap, 20); } gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL); gcc_assert (gimple_ssa_operands (cfun)->mpt_table == NULL); - gimple_ssa_operands (cfun)->operand_memory_index = ssa_operand_mem_size; + gimple_ssa_operands (cfun)->operand_memory_index + = gimple_ssa_operands (cfun)->ssa_operand_mem_size; gimple_ssa_operands (cfun)->ops_active = true; memset (&clobber_stats, 0, sizeof (clobber_stats)); init_vop_buckets (); - ssa_operand_mem_size = OP_SIZE_INIT; + gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_INIT; } @@ -426,6 +439,9 @@ fini_ssa_operands (void) gimple_ssa_operands (cfun)->ops_active = false; + if (!n_initialized) + bitmap_obstack_release (&operands_bitmap_obstack); + if (dump_file && (dump_flags & TDF_STATS)) { fprintf (dump_file, "Original clobbered vars: %d\n", @@ -452,31 +468,35 @@ ssa_operand_alloc (unsigned size) char *ptr; if (gimple_ssa_operands (cfun)->operand_memory_index + size - >= ssa_operand_mem_size) + >= gimple_ssa_operands (cfun)->ssa_operand_mem_size) { struct ssa_operand_memory_d *ptr; - if (ssa_operand_mem_size == OP_SIZE_INIT) - ssa_operand_mem_size = OP_SIZE_1 * sizeof (struct voptype_d); + if (gimple_ssa_operands (cfun)->ssa_operand_mem_size == OP_SIZE_INIT) + gimple_ssa_operands (cfun)->ssa_operand_mem_size + = OP_SIZE_1 * sizeof (struct voptype_d); else - if (ssa_operand_mem_size == OP_SIZE_1 * sizeof (struct voptype_d)) - ssa_operand_mem_size = OP_SIZE_2 * sizeof (struct voptype_d); + if (gimple_ssa_operands (cfun)->ssa_operand_mem_size + == OP_SIZE_1 * sizeof (struct voptype_d)) + gimple_ssa_operands (cfun)->ssa_operand_mem_size + = OP_SIZE_2 * sizeof (struct voptype_d); else - ssa_operand_mem_size = OP_SIZE_3 * sizeof (struct voptype_d); + gimple_ssa_operands (cfun)->ssa_operand_mem_size + = OP_SIZE_3 * sizeof (struct voptype_d); /* Go right to the maximum size if the request is too large. */ - if (size > ssa_operand_mem_size) - ssa_operand_mem_size = OP_SIZE_3 * sizeof (struct voptype_d); + if (size > gimple_ssa_operands (cfun)->ssa_operand_mem_size) + gimple_ssa_operands (cfun)->ssa_operand_mem_size + = OP_SIZE_3 * sizeof (struct voptype_d); - /* Fail if there is not enough space. If there are this many operands - required, first make sure there isn't a different problem causing this - many operands. If the decision is that this is OK, then we can - specially allocate a buffer just for this request. */ - gcc_assert (size <= ssa_operand_mem_size); + /* We can reliably trigger the case that we need arbitrary many + operands (see PR34093), so allocate a buffer just for this request. 
*/ + if (size > gimple_ssa_operands (cfun)->ssa_operand_mem_size) + gimple_ssa_operands (cfun)->ssa_operand_mem_size = size; ptr = (struct ssa_operand_memory_d *) ggc_alloc (sizeof (struct ssa_operand_memory_d) - + ssa_operand_mem_size - 1); + + gimple_ssa_operands (cfun)->ssa_operand_mem_size - 1); ptr->next = gimple_ssa_operands (cfun)->operand_memory; gimple_ssa_operands (cfun)->operand_memory = ptr; gimple_ssa_operands (cfun)->operand_memory_index = 0; @@ -563,11 +583,11 @@ alloc_vop (int num) sure the stmt pointer is set to the current stmt. */ static inline void -set_virtual_use_link (use_operand_p ptr, tree stmt) +set_virtual_use_link (use_operand_p ptr, gimple stmt) { /* fold_stmt may have changed the stmt pointers. */ - if (ptr->stmt != stmt) - ptr->stmt = stmt; + if (ptr->loc.stmt != stmt) + ptr->loc.stmt = stmt; /* If this use isn't in a list, add it to the correct list. */ if (!ptr->prev) @@ -580,55 +600,56 @@ set_virtual_use_link (use_operand_p ptr, tree stmt) static inline def_optype_p add_def_op (tree *op, def_optype_p last) { - def_optype_p new; + def_optype_p new_def; - new = alloc_def (); - DEF_OP_PTR (new) = op; - last->next = new; - new->next = NULL; - return new; + new_def = alloc_def (); + DEF_OP_PTR (new_def) = op; + last->next = new_def; + new_def->next = NULL; + return new_def; } /* Adds OP to the list of uses of statement STMT after LAST. */ static inline use_optype_p -add_use_op (tree stmt, tree *op, use_optype_p last) +add_use_op (gimple stmt, tree *op, use_optype_p last) { - use_optype_p new; - - new = alloc_use (); - USE_OP_PTR (new)->use = op; - link_imm_use_stmt (USE_OP_PTR (new), *op, stmt); - last->next = new; - new->next = NULL; - return new; + use_optype_p new_use; + + new_use = alloc_use (); + USE_OP_PTR (new_use)->use = op; + link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt); + last->next = new_use; + new_use->next = NULL; + return new_use; } -/* Return a virtual op pointer with NUM elements which are all initialized to OP - and are linked into the immediate uses for STMT. The new vop is appended - after PREV. */ +/* Return a virtual op pointer with NUM elements which are all + initialized to OP and are linked into the immediate uses for STMT. + The new vop is appended after PREV. */ static inline voptype_p -add_vop (tree stmt, tree op, int num, voptype_p prev) +add_vop (gimple stmt, tree op, int num, voptype_p prev) { - voptype_p new; + voptype_p new_vop; int x; - new = alloc_vop (num); + new_vop = alloc_vop (num); for (x = 0; x < num; x++) { - VUSE_OP_PTR (new, x)->prev = NULL; - SET_VUSE_OP (new, x, op); - VUSE_OP_PTR (new, x)->use = &new->usev.uses[x].use_var; - link_imm_use_stmt (VUSE_OP_PTR (new, x), new->usev.uses[x].use_var, stmt); + VUSE_OP_PTR (new_vop, x)->prev = NULL; + SET_VUSE_OP (new_vop, x, op); + VUSE_OP_PTR (new_vop, x)->use = &new_vop->usev.uses[x].use_var; + link_imm_use_stmt (VUSE_OP_PTR (new_vop, x), + new_vop->usev.uses[x].use_var, stmt); } if (prev) - prev->next = new; - new->next = NULL; - return new; + prev->next = new_vop; + new_vop->next = NULL; + return new_vop; } @@ -636,11 +657,11 @@ add_vop (tree stmt, tree op, int num, voptype_p prev) LAST to the new element. 
*/ static inline voptype_p -add_vuse_op (tree stmt, tree op, int num, voptype_p last) +add_vuse_op (gimple stmt, tree op, int num, voptype_p last) { - voptype_p new = add_vop (stmt, op, num, last); - VDEF_RESULT (new) = NULL_TREE; - return new; + voptype_p new_vop = add_vop (stmt, op, num, last); + VDEF_RESULT (new_vop) = NULL_TREE; + return new_vop; } @@ -648,111 +669,19 @@ add_vuse_op (tree stmt, tree op, int num, voptype_p last) LAST to the new element. */ static inline voptype_p -add_vdef_op (tree stmt, tree op, int num, voptype_p last) +add_vdef_op (gimple stmt, tree op, int num, voptype_p last) { - voptype_p new = add_vop (stmt, op, num, last); - VDEF_RESULT (new) = op; - return new; + voptype_p new_vop = add_vop (stmt, op, num, last); + VDEF_RESULT (new_vop) = op; + return new_vop; } -/* Reallocate the virtual operand PTR so that it has NUM_ELEM use slots. ROOT - is the head of the operand list it belongs to. */ - -static inline struct voptype_d * -realloc_vop (struct voptype_d *ptr, int num_elem, struct voptype_d **root) -{ - int x, lim; - tree stmt, val; - struct voptype_d *ret, *tmp; - - if (VUSE_VECT_NUM_ELEM (ptr->usev) == num_elem) - return ptr; - - val = VUSE_OP (ptr, 0); - if (TREE_CODE (val) == SSA_NAME) - val = SSA_NAME_VAR (val); - - stmt = USE_STMT (VUSE_OP_PTR (ptr, 0)); - - /* Delink all the existing uses. */ - for (x = 0; x < VUSE_VECT_NUM_ELEM (ptr->usev); x++) - { - use_operand_p use_p = VUSE_OP_PTR (ptr, x); - delink_imm_use (use_p); - } - - /* If we want less space, simply use this one, and shrink the size. */ - if (VUSE_VECT_NUM_ELEM (ptr->usev) > num_elem) - { - VUSE_VECT_NUM_ELEM (ptr->usev) = num_elem; - return ptr; - } - - /* It is growing. Allocate a new one and replace the old one. */ - ret = add_vuse_op (stmt, val, num_elem, ptr); - - /* Clear PTR and add its memory to the free list. */ - lim = VUSE_VECT_NUM_ELEM (ptr->usev); - memset (ptr, 0, - sizeof (struct voptype_d) + sizeof (vuse_element_t) * (lim- 1)); - add_vop_to_freelist (ptr); - - /* Now simply remove the old one. */ - if (*root == ptr) - { - *root = ret; - return ret; - } - else - for (tmp = *root; - tmp != NULL && tmp->next != ptr; - tmp = tmp->next) - { - tmp->next = ret; - return ret; - } - - /* The pointer passed in isn't in STMT's VDEF lists. */ - gcc_unreachable (); -} - - -/* Reallocate the PTR vdef so that it has NUM_ELEM use slots. */ - -struct voptype_d * -realloc_vdef (struct voptype_d *ptr, int num_elem) -{ - tree val, stmt; - struct voptype_d *ret; - - val = VDEF_RESULT (ptr); - stmt = USE_STMT (VDEF_OP_PTR (ptr, 0)); - ret = realloc_vop (ptr, num_elem, &(VDEF_OPS (stmt))); - VDEF_RESULT (ret) = val; - return ret; -} - - -/* Reallocate the PTR vuse so that it has NUM_ELEM use slots. */ - -struct voptype_d * -realloc_vuse (struct voptype_d *ptr, int num_elem) -{ - tree stmt; - struct voptype_d *ret; - - stmt = USE_STMT (VUSE_OP_PTR (ptr, 0)); - ret = realloc_vop (ptr, num_elem, &(VUSE_OPS (stmt))); - return ret; -} - - /* Takes elements from build_defs and turns them into def operands of STMT. TODO -- Make build_defs VEC of tree *. */ static inline void -finalize_ssa_defs (tree stmt) +finalize_ssa_defs (gimple stmt) { unsigned new_i; struct def_optype_d new_list; @@ -760,12 +689,12 @@ finalize_ssa_defs (tree stmt) unsigned int num = VEC_length (tree, build_defs); /* There should only be a single real definition per assignment. 
*/ - gcc_assert ((stmt && TREE_CODE (stmt) != GIMPLE_MODIFY_STMT) || num <= 1); + gcc_assert ((stmt && gimple_code (stmt) != GIMPLE_ASSIGN) || num <= 1); new_list.next = NULL; last = &new_list; - old_ops = DEF_OPS (stmt); + old_ops = gimple_def_ops (stmt); new_i = 0; @@ -786,13 +715,13 @@ finalize_ssa_defs (tree stmt) last = add_def_op ((tree *) VEC_index (tree, build_defs, new_i), last); /* Now set the stmt's operands. */ - DEF_OPS (stmt) = new_list.next; + gimple_set_def_ops (stmt, new_list.next); #ifdef ENABLE_CHECKING { def_optype_p ptr; unsigned x = 0; - for (ptr = DEF_OPS (stmt); ptr; ptr = ptr->next) + for (ptr = gimple_def_ops (stmt); ptr; ptr = ptr->next) x++; gcc_assert (x == num); @@ -805,30 +734,16 @@ finalize_ssa_defs (tree stmt) TODO -- Make build_uses VEC of tree *. */ static inline void -finalize_ssa_uses (tree stmt) +finalize_ssa_uses (gimple stmt) { unsigned new_i; struct use_optype_d new_list; use_optype_p old_ops, ptr, last; -#ifdef ENABLE_CHECKING - { - unsigned x; - unsigned num = VEC_length (tree, build_uses); - - /* If the pointer to the operand is the statement itself, something is - wrong. It means that we are pointing to a local variable (the - initial call to update_stmt_operands does not pass a pointer to a - statement). */ - for (x = 0; x < num; x++) - gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt); - } -#endif - new_list.next = NULL; last = &new_list; - old_ops = USE_OPS (stmt); + old_ops = gimple_use_ops (stmt); /* If there is anything in the old list, free it. */ if (old_ops) @@ -846,12 +761,12 @@ finalize_ssa_uses (tree stmt) last); /* Now set the stmt's operands. */ - USE_OPS (stmt) = new_list.next; + gimple_set_use_ops (stmt, new_list.next); #ifdef ENABLE_CHECKING { unsigned x = 0; - for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next) + for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next) x++; gcc_assert (x == VEC_length (tree, build_uses)); @@ -861,27 +776,17 @@ finalize_ssa_uses (tree stmt) /* Takes elements from BUILD_VDEFS and turns them into vdef operands of - STMT. FIXME, for now VDEF operators should have a single operand - in their RHS. */ + STMT. */ static inline void -finalize_ssa_vdefs (tree stmt) +finalize_ssa_vdefs (gimple stmt) { unsigned new_i; struct voptype_d new_list; voptype_p old_ops, ptr, last; - stmt_ann_t ann = stmt_ann (stmt); /* Set the symbols referenced by STMT. */ - if (!bitmap_empty_p (build_stores)) - { - if (ann->operands.stores == NULL) - ann->operands.stores = BITMAP_ALLOC (NULL); - - bitmap_copy (ann->operands.stores, build_stores); - } - else - BITMAP_FREE (ann->operands.stores); + gimple_set_stored_syms (stmt, build_stores, &operands_bitmap_obstack); /* If aliases have not been computed, do not instantiate a virtual operator on STMT. Initially, we only compute the SSA form on @@ -896,7 +801,7 @@ finalize_ssa_vdefs (tree stmt) new_list.next = NULL; last = &new_list; - old_ops = VDEF_OPS (stmt); + old_ops = gimple_vdef_ops (stmt); new_i = 0; while (old_ops && new_i < VEC_length (tree, build_vdefs)) { @@ -951,12 +856,12 @@ finalize_ssa_vdefs (tree stmt) } /* Now set STMT's operands. */ - VDEF_OPS (stmt) = new_list.next; + gimple_set_vdef_ops (stmt, new_list.next); #ifdef ENABLE_CHECKING { unsigned x = 0; - for (ptr = VDEF_OPS (stmt); ptr; ptr = ptr->next) + for (ptr = gimple_vdef_ops (stmt); ptr; ptr = ptr->next) x++; gcc_assert (x == VEC_length (tree, build_vdefs)); @@ -969,25 +874,14 @@ finalize_ssa_vdefs (tree stmt) STMT. 
*/ static inline void -finalize_ssa_vuse_ops (tree stmt) +finalize_ssa_vuse_ops (gimple stmt) { - unsigned new_i; - int old_i; + unsigned new_i, old_i; voptype_p old_ops, last; VEC(tree,heap) *new_ops; - stmt_ann_t ann; /* Set the symbols referenced by STMT. */ - ann = stmt_ann (stmt); - if (!bitmap_empty_p (build_loads)) - { - if (ann->operands.loads == NULL) - ann->operands.loads = BITMAP_ALLOC (NULL); - - bitmap_copy (ann->operands.loads, build_loads); - } - else - BITMAP_FREE (ann->operands.loads); + gimple_set_loaded_syms (stmt, build_loads, &operands_bitmap_obstack); /* If aliases have not been computed, do not instantiate a virtual operator on STMT. Initially, we only compute the SSA form on @@ -1000,7 +894,7 @@ finalize_ssa_vuse_ops (tree stmt) return; /* STMT should have at most one VUSE operator. */ - old_ops = VUSE_OPS (stmt); + old_ops = gimple_vuse_ops (stmt); gcc_assert (old_ops == NULL || old_ops->next == NULL); new_ops = NULL; @@ -1045,7 +939,7 @@ finalize_ssa_vuse_ops (tree stmt) for (old_i = 0; old_i < VUSE_NUM (old_ops); old_i++) delink_imm_use (VUSE_OP_PTR (old_ops, old_i)); add_vop_to_freelist (old_ops); - VUSE_OPS (stmt) = NULL; + gimple_set_vuse_ops (stmt, NULL); } /* If there are any operands, instantiate a VUSE operator for STMT. */ @@ -1059,17 +953,18 @@ finalize_ssa_vuse_ops (tree stmt) for (i = 0; VEC_iterate (tree, new_ops, i, op); i++) SET_USE (VUSE_OP_PTR (last, (int) i), op); - VUSE_OPS (stmt) = last; + gimple_set_vuse_ops (stmt, last); + VEC_free (tree, heap, new_ops); } #ifdef ENABLE_CHECKING { unsigned x; - if (VUSE_OPS (stmt)) + if (gimple_vuse_ops (stmt)) { - gcc_assert (VUSE_OPS (stmt)->next == NULL); - x = VUSE_NUM (VUSE_OPS (stmt)); + gcc_assert (gimple_vuse_ops (stmt)->next == NULL); + x = VUSE_NUM (gimple_vuse_ops (stmt)); } else x = 0; @@ -1082,7 +977,7 @@ finalize_ssa_vuse_ops (tree stmt) /* Return a new VUSE operand vector for STMT. */ static void -finalize_ssa_vuses (tree stmt) +finalize_ssa_vuses (gimple stmt) { unsigned num, num_vdefs; unsigned vuse_index; @@ -1152,12 +1047,15 @@ cleanup_build_arrays (void) /* Finalize all the build vectors, fill the new ones into INFO. */ static inline void -finalize_ssa_stmt_operands (tree stmt) +finalize_ssa_stmt_operands (gimple stmt) { finalize_ssa_defs (stmt); finalize_ssa_uses (stmt); - finalize_ssa_vdefs (stmt); - finalize_ssa_vuses (stmt); + if (gimple_has_mem_ops (stmt)) + { + finalize_ssa_vdefs (stmt); + finalize_ssa_vuses (stmt); + } cleanup_build_arrays (); } @@ -1246,8 +1144,15 @@ append_vuse (tree var) /* Don't allow duplicate entries. */ ann = get_var_ann (var); - if (ann->in_vuse_list || ann->in_vdef_list) + if (ann->in_vuse_list) return; + else if (ann->in_vdef_list) + { + /* We don't want a vuse if we already have a vdef, but we must + still put this in build_loads. */ + bitmap_set_bit (build_loads, DECL_UID (var)); + return; + } ann->in_vuse_list = true; sym = var; @@ -1263,7 +1168,9 @@ append_vuse (tree var) /* REF is a tree that contains the entire pointer dereference expression, if available, or NULL otherwise. ALIAS is the variable we are asking if REF can access. OFFSET and SIZE come from the - memory access expression that generated this virtual operand. */ + memory access expression that generated this virtual operand. + + XXX: We should handle the NO_ALIAS attributes here. 
*/ static bool access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset, @@ -1279,49 +1186,11 @@ access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset, if (alias == gimple_global_var (cfun)) return true; - /* If ALIAS is an SFT, it can't be touched if the offset - and size of the access is not overlapping with the SFT offset and - size. This is only true if we are accessing through a pointer - to a type that is the same as SFT_PARENT_VAR. Otherwise, we may - be accessing through a pointer to some substruct of the - structure, and if we try to prune there, we will have the wrong - offset, and get the wrong answer. - i.e., we can't prune without more work if we have something like - - struct gcc_target - { - struct asm_out - { - const char *byte_op; - struct asm_int_op - { - const char *hi; - } aligned_op; - } asm_out; - } targetm; - - foo = &targetm.asm_out.aligned_op; - return foo->hi; - - SFT.1, which represents hi, will have SFT_OFFSET=32 because in - terms of SFT_PARENT_VAR, that is where it is. - However, the access through the foo pointer will be at offset 0. */ - if (size != -1 - && TREE_CODE (alias) == STRUCT_FIELD_TAG - && base - && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias)) - && !overlap_subvar (offset, size, alias, NULL)) - { -#ifdef ACCESS_DEBUGGING - fprintf (stderr, "Access to "); - print_generic_expr (stderr, ref, 0); - fprintf (stderr, " may not touch "); - print_generic_expr (stderr, alias, 0); - fprintf (stderr, " in function %s\n", get_name (current_function_decl)); -#endif - return false; - } - + /* If ref is a TARGET_MEM_REF, just return true, as we can't really + disambiguate them right now. */ + if (ref && TREE_CODE (ref) == TARGET_MEM_REF) + return true; + /* Without strict aliasing, it is impossible for a component access through a pointer to touch a random variable, unless that variable *is* a structure or a pointer. @@ -1367,19 +1236,37 @@ access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset, } To implement this, we just punt on accesses through union pointers entirely. + + Another case we have to allow is accessing a variable + through an array access at offset zero. This happens from + code generated by the fortran frontend like + + char[1:1] & my_char_ref; + char my_char; + my_char_ref_1 = (char[1:1] &) &my_char; + D.874_2 = (*my_char_ref_1)[1]{lb: 1 sz: 1}; */ - else if (ref - && flag_strict_aliasing - && TREE_CODE (ref) != INDIRECT_REF - && !MTAG_P (alias) - && (TREE_CODE (base) != INDIRECT_REF - || TREE_CODE (TREE_TYPE (base)) != UNION_TYPE) - && !AGGREGATE_TYPE_P (TREE_TYPE (alias)) - && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE - && !var_ann (alias)->is_heapvar - /* When the struct has may_alias attached to it, we need not to - return true. */ - && get_alias_set (base)) + if (ref + && flag_strict_aliasing + && TREE_CODE (ref) != INDIRECT_REF + && !MTAG_P (alias) + && base + && (TREE_CODE (base) != INDIRECT_REF + || TREE_CODE (TREE_TYPE (base)) != UNION_TYPE) + && (TREE_CODE (base) != INDIRECT_REF + || TREE_CODE (ref) != ARRAY_REF + || offset != 0 + || (DECL_SIZE (alias) + && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST + && size != -1 + && (unsigned HOST_WIDE_INT)size + != TREE_INT_CST_LOW (DECL_SIZE (alias)))) + && !AGGREGATE_TYPE_P (TREE_TYPE (alias)) + && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE + && !var_ann (alias)->is_heapvar + /* When the struct has may_alias attached to it, we need not to + return true. 
*/ + && get_alias_set (base)) { #ifdef ACCESS_DEBUGGING fprintf (stderr, "Access to "); @@ -1398,11 +1285,12 @@ access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset, && flag_strict_aliasing && TREE_CODE (ref) != INDIRECT_REF && !MTAG_P (alias) + && !var_ann (alias)->is_heapvar && !POINTER_TYPE_P (TREE_TYPE (alias)) && offsetgtz && DECL_SIZE (alias) && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST - && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias))) + && uoffset >= TREE_INT_CST_LOW (DECL_SIZE (alias))) { #ifdef ACCESS_DEBUGGING fprintf (stderr, "Access to "); @@ -1417,20 +1305,19 @@ access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset, return true; } - -/* Add VAR to the virtual operands array. FLAGS is as in +/* Add VAR to the virtual operands for STMT. FLAGS is as in get_expr_operands. FULL_REF is a tree that contains the entire pointer dereference expression, if available, or NULL otherwise. OFFSET and SIZE come from the memory access expression that - generated this virtual operand. FOR_CLOBBER is true is this is - adding a virtual operand for a call clobber. */ + generated this virtual operand. IS_CALL_SITE is true if the + affected statement is a call site. */ -static void -add_virtual_operand (tree var, stmt_ann_t s_ann, int flags, +static void +add_virtual_operand (tree var, gimple stmt, int flags, tree full_ref, HOST_WIDE_INT offset, - HOST_WIDE_INT size, bool for_clobber) + HOST_WIDE_INT size, bool is_call_site) { - VEC(tree,gc) *aliases; + bitmap aliases = NULL; tree sym; var_ann_t v_ann; @@ -1438,12 +1325,7 @@ add_virtual_operand (tree var, stmt_ann_t s_ann, int flags, v_ann = var_ann (sym); /* Mark the statement as having memory operands. */ - s_ann->references_memory = true; - - /* Mark statements with volatile operands. Optimizers should back - off from statements having volatile operands. */ - if (TREE_THIS_VOLATILE (sym) && s_ann) - s_ann->has_volatile_ops = true; + gimple_set_references_memory (stmt, true); /* If the variable cannot be modified and this is a VDEF change it into a VUSE. This happens when read-only variables are marked @@ -1451,7 +1333,7 @@ add_virtual_operand (tree var, stmt_ann_t s_ann, int flags, check that this only happens on non-specific stores. Note that if this is a specific store, i.e. associated with a - GIMPLE_MODIFY_STMT, then we can't suppress the VDEF, lest we run + MODIFY_EXPR, then we can't suppress the VDEF, lest we run into validation problems. This can happen when programs cast away const, leaving us with a @@ -1468,9 +1350,14 @@ add_virtual_operand (tree var, stmt_ann_t s_ann, int flags, if (flags & opf_no_vops) return; - aliases = v_ann->may_aliases; + if (MTAG_P (var)) + aliases = MTAG_ALIASES (var); + if (aliases == NULL) { + if (!gimple_aliases_computed_p (cfun) && (flags & opf_def)) + gimple_set_has_volatile_ops (stmt, true); + /* The variable is not aliased or it is an alias tag. */ if (flags & opf_def) append_vdef (var); @@ -1479,26 +1366,40 @@ add_virtual_operand (tree var, stmt_ann_t s_ann, int flags, } else { - unsigned i; - tree al; + bitmap_iterator bi; + unsigned int i; + bool none_added = true; /* The variable is aliased. Add its aliases to the virtual operands. 
*/ - gcc_assert (VEC_length (tree, aliases) != 0); - - if (flags & opf_def) - { - bool none_added = true; + gcc_assert (!bitmap_empty_p (aliases)); - for (i = 0; VEC_iterate (tree, aliases, i, al); i++) - { - if (!access_can_touch_variable (full_ref, al, offset, size)) - continue; - - none_added = false; - append_vdef (al); - } + EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi) + { + tree al = referenced_var (i); + + /* Call-clobbered tags may have non-call-clobbered + symbols in their alias sets. Ignore them if we are + adding VOPs for a call site. */ + if (is_call_site && !is_call_clobbered (al)) + continue; + + /* If we do not know the full reference tree or if the access is + unspecified [0, -1], we cannot prune it. Otherwise try doing + so using access_can_touch_variable. */ + if (full_ref + && !access_can_touch_variable (full_ref, al, offset, size)) + continue; + + if (flags & opf_def) + append_vdef (al); + else + append_vuse (al); + none_added = false; + } + if (flags & opf_def) + { /* If the variable is also an alias tag, add a virtual operand for it, otherwise we will miss representing references to the members of the variable's alias set. @@ -1510,46 +1411,37 @@ add_virtual_operand (tree var, stmt_ann_t s_ann, int flags, keep the number of these bare defs we add down to the minimum necessary, we keep track of which SMT's were used alone in statement vdefs or VUSEs. */ - if (v_ann->is_aliased - || none_added + if (none_added || (TREE_CODE (var) == SYMBOL_MEMORY_TAG - && for_clobber)) - { - append_vdef (var); - } + && is_call_site)) + append_vdef (var); } else { - bool none_added = true; - for (i = 0; VEC_iterate (tree, aliases, i, al); i++) - { - if (!access_can_touch_variable (full_ref, al, offset, size)) - continue; - none_added = false; - append_vuse (al); - } - - /* Similarly, append a virtual uses for VAR itself, when - it is an alias tag. */ - if (v_ann->is_aliased || none_added) + /* Even if no aliases have been added, we still need to + establish def-use and use-def chains, lest + transformations think that this is not a memory + reference. For an example of this scenario, see + testsuite/g++.dg/opt/cleanup1.C. */ + if (none_added) append_vuse (var); } } } -/* Add *VAR_P to the appropriate operand array for S_ANN. FLAGS is as in - get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to - the statement's real operands, otherwise it is added to virtual - operands. */ +/* Add *VAR_P to the appropriate operand array for statement STMT. + FLAGS is as in get_expr_operands. If *VAR_P is a GIMPLE register, + it will be added to the statement's real operands, otherwise it is + added to virtual operands. */ static void -add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags) +add_stmt_operand (tree *var_p, gimple stmt, int flags) { tree var, sym; var_ann_t v_ann; - gcc_assert (SSA_VAR_P (*var_p) && s_ann); + gcc_assert (SSA_VAR_P (*var_p)); var = *var_p; sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var); @@ -1557,7 +1449,7 @@ add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags) /* Mark statements with volatile operands. 
*/ if (TREE_THIS_VOLATILE (sym)) - s_ann->has_volatile_ops = true; + gimple_set_has_volatile_ops (stmt, true); if (is_gimple_reg (sym)) { @@ -1568,39 +1460,22 @@ add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags) append_use (var_p); } else - add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false); + add_virtual_operand (var, stmt, flags, NULL_TREE, 0, -1, false); } - -/* A subroutine of get_expr_operands to handle INDIRECT_REF, - ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF. - - STMT is the statement being processed, EXPR is the INDIRECT_REF - that got us here. - - FLAGS is as in get_expr_operands. - - FULL_REF contains the full pointer dereference expression, if we - have it, or NULL otherwise. - - OFFSET and SIZE are the location of the access inside the - dereferenced pointer, if known. - - RECURSE_ON_BASE should be set to true if we want to continue - calling get_expr_operands on the base pointer, and false if - something else will do it for us. */ +/* Subroutine of get_indirect_ref_operands. ADDR is the address + that is dereferenced, the meaning of the rest of the arguments + is the same as in get_indirect_ref_operands. */ static void -get_indirect_ref_operands (tree stmt, tree expr, int flags, - tree full_ref, - HOST_WIDE_INT offset, HOST_WIDE_INT size, - bool recurse_on_base) +get_addr_dereference_operands (gimple stmt, tree *addr, int flags, + tree full_ref, HOST_WIDE_INT offset, + HOST_WIDE_INT size, bool recurse_on_base) { - tree *pptr = &TREE_OPERAND (expr, 0); - tree ptr = *pptr; - stmt_ann_t s_ann = stmt_ann (stmt); + tree ptr = *addr; - s_ann->references_memory = true; + /* Mark the statement as having memory operands. */ + gimple_set_references_memory (stmt, true); if (SSA_VAR_P (ptr)) { @@ -1612,7 +1487,7 @@ get_indirect_ref_operands (tree stmt, tree expr, int flags, && pi->name_mem_tag) { /* PTR has its own memory tag. Use it. */ - add_virtual_operand (pi->name_mem_tag, s_ann, flags, + add_virtual_operand (pi->name_mem_tag, stmt, flags, full_ref, offset, size, false); } else @@ -1624,25 +1499,48 @@ get_indirect_ref_operands (tree stmt, tree expr, int flags, /* If we are emitting debugging dumps, display a warning if PTR is an SSA_NAME with no flow-sensitive alias information. That means that we may need to compute - aliasing again. */ + aliasing again or that a propagation pass forgot to + update the alias information on the pointers. */ if (dump_file && TREE_CODE (ptr) == SSA_NAME - && pi == NULL) + && (pi == NULL + || (pi->name_mem_tag == NULL_TREE + && !pi->pt_anything)) + && gimple_aliases_computed_p (cfun)) { fprintf (dump_file, "NOTE: no flow-sensitive alias info for "); print_generic_expr (dump_file, ptr, dump_flags); fprintf (dump_file, " in "); - print_generic_stmt (dump_file, stmt, dump_flags); + print_gimple_stmt (dump_file, stmt, 0, 0); } if (TREE_CODE (ptr) == SSA_NAME) ptr = SSA_NAME_VAR (ptr); v_ann = var_ann (ptr); + /* If we don't know what this pointer points to then we have + to make sure to not prune virtual operands based on offset + and size. */ if (v_ann->symbol_mem_tag) - add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags, - full_ref, offset, size, false); + { + add_virtual_operand (v_ann->symbol_mem_tag, stmt, flags, + full_ref, 0, -1, false); + /* Make sure we add the SMT itself. 
*/ + if (!(flags & opf_no_vops)) + { + if (flags & opf_def) + append_vdef (v_ann->symbol_mem_tag); + else + append_vuse (v_ann->symbol_mem_tag); + } + } + + /* Aliasing information is missing; mark statement as + volatile so we won't optimize it out too actively. */ + else if (!gimple_aliases_computed_p (cfun) + && (flags & opf_def)) + gimple_set_has_volatile_ops (stmt, true); } } else if (TREE_CODE (ptr) == INTEGER_CST) @@ -1650,8 +1548,7 @@ get_indirect_ref_operands (tree stmt, tree expr, int flags, /* If a constant is used as a pointer, we can't generate a real operand for it but we mark the statement volatile to prevent optimizations from messing things up. */ - if (s_ann) - s_ann->has_volatile_ops = true; + gimple_set_has_volatile_ops (stmt, true); return; } else @@ -1662,54 +1559,74 @@ get_indirect_ref_operands (tree stmt, tree expr, int flags, /* If requested, add a USE operand for the base pointer. */ if (recurse_on_base) - get_expr_operands (stmt, pptr, opf_use); + get_expr_operands (stmt, addr, opf_use); +} + + +/* A subroutine of get_expr_operands to handle INDIRECT_REF, + ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF. + + STMT is the statement being processed, EXPR is the INDIRECT_REF + that got us here. + + FLAGS is as in get_expr_operands. + + FULL_REF contains the full pointer dereference expression, if we + have it, or NULL otherwise. + + OFFSET and SIZE are the location of the access inside the + dereferenced pointer, if known. + + RECURSE_ON_BASE should be set to true if we want to continue + calling get_expr_operands on the base pointer, and false if + something else will do it for us. */ + +static void +get_indirect_ref_operands (gimple stmt, tree expr, int flags, tree full_ref, + HOST_WIDE_INT offset, HOST_WIDE_INT size, + bool recurse_on_base) +{ + tree *pptr = &TREE_OPERAND (expr, 0); + + if (TREE_THIS_VOLATILE (expr)) + gimple_set_has_volatile_ops (stmt, true); + + get_addr_dereference_operands (stmt, pptr, flags, full_ref, offset, size, + recurse_on_base); } /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */ static void -get_tmr_operands (tree stmt, tree expr, int flags) +get_tmr_operands (gimple stmt, tree expr, int flags) { - tree tag, ref; - HOST_WIDE_INT offset, size, maxsize; - subvar_t svars, sv; - stmt_ann_t s_ann = stmt_ann (stmt); + tree tag; - /* This statement references memory. */ - s_ann->references_memory = 1; + /* Mark the statement as having memory operands. */ + gimple_set_references_memory (stmt, true); /* First record the real operands. */ get_expr_operands (stmt, &TMR_BASE (expr), opf_use); get_expr_operands (stmt, &TMR_INDEX (expr), opf_use); if (TMR_SYMBOL (expr)) - add_to_addressable_set (TMR_SYMBOL (expr), &s_ann->addresses_taken); + gimple_add_to_addresses_taken (stmt, TMR_SYMBOL (expr)); tag = TMR_TAG (expr); if (!tag) { /* Something weird, so ensure that we will be careful. */ - s_ann->has_volatile_ops = true; + gimple_set_has_volatile_ops (stmt, true); return; } - - if (DECL_P (tag)) + if (!MTAG_P (tag)) { get_expr_operands (stmt, &tag, flags); return; } - ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize); - gcc_assert (ref != NULL_TREE); - svars = get_subvars_for_var (ref); - for (sv = svars; sv; sv = sv->next) - { - bool exact; - - if (overlap_subvar (offset, maxsize, sv->var, &exact)) - add_stmt_operand (&sv->var, s_ann, flags); - } + add_virtual_operand (tag, stmt, flags, expr, 0, -1, false); } @@ -1717,85 +1634,59 @@ get_tmr_operands (tree stmt, tree expr, int flags) clobbered variables in the function. 
*/ static void -add_call_clobber_ops (tree stmt, tree callee) +add_call_clobber_ops (gimple stmt, tree callee ATTRIBUTE_UNUSED) { unsigned u; bitmap_iterator bi; - stmt_ann_t s_ann = stmt_ann (stmt); bitmap not_read_b, not_written_b; - - /* Functions that are not const, pure or never return may clobber - call-clobbered variables. */ - if (s_ann) - s_ann->makes_clobbering_call = true; - /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases - for the heuristic used to decide whether to create .GLOBAL_VAR or not. */ + gcc_assert (!(gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))); + + /* If we created .GLOBAL_VAR earlier, just use it. */ if (gimple_global_var (cfun)) { tree var = gimple_global_var (cfun); - add_stmt_operand (&var, s_ann, opf_def); + add_virtual_operand (var, stmt, opf_def, NULL, 0, -1, true); return; } /* Get info for local and module level statics. There is a bit set for each static if the call being processed does not read or write that variable. */ - not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL; - not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL; + not_read_b = callee ? ipa_reference_get_not_read_global (cgraph_node (callee)) : NULL; + not_written_b = callee ? ipa_reference_get_not_written_global (cgraph_node (callee)) : NULL; /* Add a VDEF operand for every call clobbered variable. */ EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi) { tree var = referenced_var_lookup (u); - unsigned int escape_mask = var_ann (var)->escape_mask; tree real_var = var; bool not_read; bool not_written; - - /* Not read and not written are computed on regular vars, not - subvars, so look at the parent var if this is an SFT. */ - if (TREE_CODE (var) == STRUCT_FIELD_TAG) - real_var = SFT_PARENT_VAR (var); - - not_read = not_read_b ? bitmap_bit_p (not_read_b, - DECL_UID (real_var)) : false; - not_written = not_written_b ? bitmap_bit_p (not_written_b, - DECL_UID (real_var)) : false; + + not_read = not_read_b + ? bitmap_bit_p (not_read_b, DECL_UID (real_var)) + : false; + + not_written = not_written_b + ? bitmap_bit_p (not_written_b, DECL_UID (real_var)) + : false; gcc_assert (!unmodifiable_var_p (var)); clobber_stats.clobbered_vars++; /* See if this variable is really clobbered by this function. */ - /* Trivial case: Things escaping only to pure/const are not - clobbered by non-pure-const, and only read by pure/const. */ - if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0) - { - tree call = get_call_expr_in (stmt); - if (call_expr_flags (call) & (ECF_CONST | ECF_PURE)) - { - add_stmt_operand (&var, s_ann, opf_use); - clobber_stats.unescapable_clobbers_avoided++; - continue; - } - else - { - clobber_stats.unescapable_clobbers_avoided++; - continue; - } - } - if (not_written) { clobber_stats.static_write_clobbers_avoided++; if (!not_read) - add_stmt_operand (&var, s_ann, opf_use); + add_virtual_operand (var, stmt, opf_use, NULL, 0, -1, true); else clobber_stats.static_read_clobbers_avoided++; } else - add_virtual_operand (var, s_ann, opf_def, NULL, 0, -1, true); + add_virtual_operand (var, stmt, opf_def, NULL, 0, -1, true); } } @@ -1804,24 +1695,51 @@ add_call_clobber_ops (tree stmt, tree callee) function. */ static void -add_call_read_ops (tree stmt, tree callee) +add_call_read_ops (gimple stmt, tree callee ATTRIBUTE_UNUSED) { unsigned u; bitmap_iterator bi; - stmt_ann_t s_ann = stmt_ann (stmt); bitmap not_read_b; - /* if the function is not pure, it may reference memory. 
Add - a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var - for the heuristic used to decide whether to create .GLOBAL_VAR. */ + /* Const functions do not reference memory. */ + if (gimple_call_flags (stmt) & ECF_CONST) + return; + + not_read_b = callee ? ipa_reference_get_not_read_global (cgraph_node (callee)) : NULL; + + /* For pure functions we compute non-escaped uses separately. */ + if (gimple_call_flags (stmt) & ECF_PURE) + EXECUTE_IF_SET_IN_BITMAP (gimple_call_used_vars (cfun), 0, u, bi) + { + tree var = referenced_var_lookup (u); + tree real_var = var; + bool not_read; + + if (unmodifiable_var_p (var)) + continue; + + not_read = not_read_b + ? bitmap_bit_p (not_read_b, DECL_UID (real_var)) + : false; + + clobber_stats.readonly_clobbers++; + + /* See if this variable is really used by this function. */ + if (!not_read) + add_virtual_operand (var, stmt, opf_use, NULL, 0, -1, true); + else + clobber_stats.static_readonly_clobbers_avoided++; + } + + /* Add a VUSE for .GLOBAL_VAR if it has been created. See + add_referenced_var for the heuristic used to decide whether to + create .GLOBAL_VAR. */ if (gimple_global_var (cfun)) { tree var = gimple_global_var (cfun); - add_stmt_operand (&var, s_ann, opf_use); + add_virtual_operand (var, stmt, opf_use, NULL, 0, -1, true); return; } - - not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL; /* Add a VUSE for each call-clobbered variable. */ EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi) @@ -1832,12 +1750,6 @@ add_call_read_ops (tree stmt, tree callee) clobber_stats.readonly_clobbers++; - /* Not read and not written are computed on regular vars, not - subvars, so look at the parent var if this is an SFT. */ - - if (TREE_CODE (var) == STRUCT_FIELD_TAG) - real_var = SFT_PARENT_VAR (var); - not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var)) : false; @@ -1847,67 +1759,55 @@ add_call_read_ops (tree stmt, tree callee) continue; } - add_stmt_operand (&var, s_ann, opf_use | opf_implicit); + add_virtual_operand (var, stmt, opf_use, NULL, 0, -1, true); } } -/* A subroutine of get_expr_operands to handle CALL_EXPR. */ +/* If STMT is a call that may clobber globals and other symbols that + escape, add them to the VDEF/VUSE lists for it. */ static void -get_call_expr_operands (tree stmt, tree expr) +maybe_add_call_clobbered_vops (gimple stmt) { - tree op; - int call_flags = call_expr_flags (expr); - stmt_ann_t ann = stmt_ann (stmt); + int call_flags = gimple_call_flags (stmt); - ann->references_memory = true; + /* Mark the statement as having memory operands. */ + gimple_set_references_memory (stmt, true); /* If aliases have been computed already, add VDEF or VUSE operands for all the symbols that have been found to be call-clobbered. */ - if (gimple_aliases_computed_p (cfun) - && !(call_flags & ECF_NOVOPS)) + if (gimple_aliases_computed_p (cfun) && !(call_flags & ECF_NOVOPS)) { /* A 'pure' or a 'const' function never call-clobbers anything. A 'noreturn' function might, but since we don't return anyway there is no point in recording that. 
*/ - if (TREE_SIDE_EFFECTS (expr) - && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN))) - add_call_clobber_ops (stmt, get_callee_fndecl (expr)); + if (!(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN))) + add_call_clobber_ops (stmt, gimple_call_fndecl (stmt)); else if (!(call_flags & ECF_CONST)) - add_call_read_ops (stmt, get_callee_fndecl (expr)); + add_call_read_ops (stmt, gimple_call_fndecl (stmt)); } - - /* Find uses in the called function. */ - get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use); - - for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op)) - get_expr_operands (stmt, &TREE_VALUE (op), opf_use); - - get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use); } /* Scan operands in the ASM_EXPR stmt referred to in INFO. */ static void -get_asm_expr_operands (tree stmt) +get_asm_expr_operands (gimple stmt) { - stmt_ann_t s_ann; - int i, noutputs; + size_t i, noutputs; const char **oconstraints; const char *constraint; bool allows_mem, allows_reg, is_inout; - tree link; - s_ann = stmt_ann (stmt); - noutputs = list_length (ASM_OUTPUTS (stmt)); + noutputs = gimple_asm_noutputs (stmt); oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *)); /* Gather all output operands. */ - for (i = 0, link = ASM_OUTPUTS (stmt); link; i++, link = TREE_CHAIN (link)) + for (i = 0; i < gimple_asm_noutputs (stmt); i++) { + tree link = gimple_asm_output_op (stmt, i); constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link))); oconstraints[i] = constraint; parse_output_constraint (&constraint, i, 0, 0, &allows_mem, @@ -1921,16 +1821,17 @@ get_asm_expr_operands (tree stmt) if (!allows_reg && allows_mem) { tree t = get_base_address (TREE_VALUE (link)); - if (t && DECL_P (t) && s_ann) - add_to_addressable_set (t, &s_ann->addresses_taken); + if (t && DECL_P (t)) + gimple_add_to_addresses_taken (stmt, t); } get_expr_operands (stmt, &TREE_VALUE (link), opf_def); } /* Gather all input operands. */ - for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link)) + for (i = 0; i < gimple_asm_ninputs (stmt); i++) { + tree link = gimple_asm_input_op (stmt, i); constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link))); parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints, &allows_mem, &allows_reg); @@ -1940,68 +1841,39 @@ get_asm_expr_operands (tree stmt) if (!allows_reg && allows_mem) { tree t = get_base_address (TREE_VALUE (link)); - if (t && DECL_P (t) && s_ann) - add_to_addressable_set (t, &s_ann->addresses_taken); + if (t && DECL_P (t)) + gimple_add_to_addresses_taken (stmt, t); } get_expr_operands (stmt, &TREE_VALUE (link), 0); } /* Clobber all memory and addressable symbols for asm ("" : : : "memory"); */ - for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link)) - if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0) - { - unsigned i; - bitmap_iterator bi; - - s_ann->references_memory = true; - - EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi) - { - tree var = referenced_var (i); - add_stmt_operand (&var, s_ann, opf_def | opf_implicit); - } - - EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, i, bi) - { - tree var = referenced_var (i); - - /* Subvars are explicitly represented in this list, so we - don't need the original to be added to the clobber ops, - but the original *will* be in this list because we keep - the addressability of the original variable up-to-date - to avoid confusing the back-end. 
*/ - if (var_can_have_subvars (var) - && get_subvars_for_var (var) != NULL) - continue; - - add_stmt_operand (&var, s_ann, opf_def | opf_implicit); - } - break; - } -} - - -/* Scan operands for the assignment expression EXPR in statement STMT. */ - -static void -get_modify_stmt_operands (tree stmt, tree expr) -{ - /* First get operands from the RHS. */ - get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 1), opf_use); + for (i = 0; i < gimple_asm_nclobbers (stmt); i++) + { + tree link = gimple_asm_clobber_op (stmt, i); + if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0) + { + unsigned i; + bitmap_iterator bi; - /* For the LHS, use a regular definition (opf_def) for GIMPLE - registers. If the LHS is a store to memory, we will need - a preserving definition (VDEF). + /* Mark the statement as having memory operands. */ + gimple_set_references_memory (stmt, true); - Preserving definitions are those that modify a part of an - aggregate object for which no subvars have been computed (or the - reference does not correspond exactly to one of them). Stores - through a pointer are also represented with VDEF operators. + EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi) + { + tree var = referenced_var (i); + add_stmt_operand (&var, stmt, opf_def | opf_implicit); + } - We used to distinguish between preserving and killing definitions. - We always emit preserving definitions now. */ - get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 0), opf_def); + EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, i, bi) + { + tree var = referenced_var (i); + add_stmt_operand (&var, stmt, opf_def | opf_implicit); + } + break; + } + } } @@ -2010,18 +1882,17 @@ get_modify_stmt_operands (tree stmt, tree expr) interpret the operands found. */ static void -get_expr_operands (tree stmt, tree *expr_p, int flags) +get_expr_operands (gimple stmt, tree *expr_p, int flags) { enum tree_code code; - enum tree_code_class class; + enum tree_code_class codeclass; tree expr = *expr_p; - stmt_ann_t s_ann = stmt_ann (stmt); if (expr == NULL) return; code = TREE_CODE (expr); - class = TREE_CODE_CLASS (code); + codeclass = TREE_CODE_CLASS (code); switch (code) { @@ -2030,7 +1901,7 @@ get_expr_operands (tree stmt, tree *expr_p, int flags) reference to it, but the fact that the statement takes its address will be of interest to some passes (e.g. alias resolution). */ - add_to_addressable_set (TREE_OPERAND (expr, 0), &s_ann->addresses_taken); + gimple_add_to_addresses_taken (stmt, TREE_OPERAND (expr, 0)); /* If the address is invariant, there may be no interesting variable references inside. */ @@ -2047,33 +1918,16 @@ get_expr_operands (tree stmt, tree *expr_p, int flags) return; case SSA_NAME: - case STRUCT_FIELD_TAG: case SYMBOL_MEMORY_TAG: case NAME_MEMORY_TAG: - add_stmt_operand (expr_p, s_ann, flags); + add_stmt_operand (expr_p, stmt, flags); return; case VAR_DECL: case PARM_DECL: case RESULT_DECL: - { - subvar_t svars; - - /* Add the subvars for a variable, if it has subvars, to DEFS - or USES. Otherwise, add the variable itself. Whether it - goes to USES or DEFS depends on the operand flags. 
*/ - if (var_can_have_subvars (expr) - && (svars = get_subvars_for_var (expr))) - { - subvar_t sv; - for (sv = svars; sv; sv = sv->next) - add_stmt_operand (&sv->var, s_ann, flags); - } - else - add_stmt_operand (expr_p, s_ann, flags); - - return; - } + add_stmt_operand (expr_p, stmt, flags); + return; case MISALIGNED_INDIRECT_REF: get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags); @@ -2081,7 +1935,7 @@ get_expr_operands (tree stmt, tree *expr_p, int flags) case ALIGN_INDIRECT_REF: case INDIRECT_REF: - get_indirect_ref_operands (stmt, expr, flags, NULL_TREE, 0, -1, true); + get_indirect_ref_operands (stmt, expr, flags, expr, 0, -1, true); return; case TARGET_MEM_REF: @@ -2096,51 +1950,24 @@ get_expr_operands (tree stmt, tree *expr_p, int flags) { tree ref; HOST_WIDE_INT offset, size, maxsize; - bool none = true; - - /* This component reference becomes an access to all of the - subvariables it can touch, if we can determine that, but - *NOT* the real one. If we can't determine which fields we - could touch, the recursion will eventually get to a - variable and add *all* of its subvars, or whatever is the - minimum correct subset. */ - ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize); - if (SSA_VAR_P (ref) && get_subvars_for_var (ref)) - { - subvar_t sv; - subvar_t svars = get_subvars_for_var (ref); - for (sv = svars; sv; sv = sv->next) - { - bool exact; - - if (overlap_subvar (offset, maxsize, sv->var, &exact)) - { - int subvar_flags = flags; - none = false; - add_stmt_operand (&sv->var, s_ann, subvar_flags); - } - } + if (TREE_THIS_VOLATILE (expr)) + gimple_set_has_volatile_ops (stmt, true); - if (!none) - flags |= opf_no_vops; - } - else if (TREE_CODE (ref) == INDIRECT_REF) + ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize); + if (TREE_CODE (ref) == INDIRECT_REF) { get_indirect_ref_operands (stmt, ref, flags, expr, offset, maxsize, false); flags |= opf_no_vops; } - /* Even if we found subvars above we need to ensure to see - immediate uses for d in s.a[d]. In case of s.a having - a subvar or we would miss it otherwise. 
*/ get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); if (code == COMPONENT_REF) { - if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1))) - s_ann->has_volatile_ops = true; + if (TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1))) + gimple_set_has_volatile_ops (stmt, true); get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use); } else if (code == ARRAY_REF || code == ARRAY_RANGE_REF) @@ -2160,10 +1987,6 @@ get_expr_operands (tree stmt, tree *expr_p, int flags) get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); return; - case CALL_EXPR: - get_call_expr_operands (stmt, expr); - return; - case COND_EXPR: case VEC_COND_EXPR: get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use); @@ -2171,10 +1994,6 @@ get_expr_operands (tree stmt, tree *expr_p, int flags) get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use); return; - case GIMPLE_MODIFY_STMT: - get_modify_stmt_operands (stmt, expr); - return; - case CONSTRUCTOR: { /* General aggregate CONSTRUCTORs have been decomposed, but they @@ -2191,6 +2010,10 @@ get_expr_operands (tree stmt, tree *expr_p, int flags) } case BIT_FIELD_REF: + if (TREE_THIS_VOLATILE (expr)) + gimple_set_has_volatile_ops (stmt, true); + /* FALLTHRU */ + case TRUTH_NOT_EXPR: case VIEW_CONVERT_EXPR: do_unary: @@ -2219,30 +2042,24 @@ get_expr_operands (tree stmt, tree *expr_p, int flags) return; } - case BLOCK: + case CHANGE_DYNAMIC_TYPE_EXPR: + gcc_unreachable (); + case FUNCTION_DECL: - case EXC_PTR_EXPR: - case FILTER_EXPR: case LABEL_DECL: case CONST_DECL: - case OMP_PARALLEL: - case OMP_SECTIONS: - case OMP_FOR: - case OMP_SINGLE: - case OMP_MASTER: - case OMP_ORDERED: - case OMP_CRITICAL: - case OMP_RETURN: - case OMP_CONTINUE: + case CASE_LABEL_EXPR: + case FILTER_EXPR: + case EXC_PTR_EXPR: /* Expressions that make no memory references. */ return; default: - if (class == tcc_unary) + if (codeclass == tcc_unary) goto do_unary; - if (class == tcc_binary || class == tcc_comparison) + if (codeclass == tcc_binary || codeclass == tcc_comparison) goto do_binary; - if (class == tcc_constant || class == tcc_type) + if (codeclass == tcc_constant || codeclass == tcc_type) return; } @@ -2260,59 +2077,28 @@ get_expr_operands (tree stmt, tree *expr_p, int flags) build_* operand vectors will have potential operands in them. */ static void -parse_ssa_operands (tree stmt) +parse_ssa_operands (gimple stmt) { - enum tree_code code; + enum gimple_code code = gimple_code (stmt); - code = TREE_CODE (stmt); - switch (code) + if (code == GIMPLE_ASM) + get_asm_expr_operands (stmt); + else { - case GIMPLE_MODIFY_STMT: - get_modify_stmt_operands (stmt, stmt); - break; - - case COND_EXPR: - get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_use); - break; - - case SWITCH_EXPR: - get_expr_operands (stmt, &SWITCH_COND (stmt), opf_use); - break; + size_t i, start = 0; - case ASM_EXPR: - get_asm_expr_operands (stmt); - break; - - case RETURN_EXPR: - get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_use); - break; - - case GOTO_EXPR: - get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_use); - break; - - case LABEL_EXPR: - get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_use); - break; + if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL) + { + get_expr_operands (stmt, gimple_op_ptr (stmt, 0), opf_def); + start = 1; + } - case BIND_EXPR: - case CASE_LABEL_EXPR: - case TRY_CATCH_EXPR: - case TRY_FINALLY_EXPR: - case EH_FILTER_EXPR: - case CATCH_EXPR: - case RESX_EXPR: - /* These nodes contain no variable references. 
*/ - break; + for (i = start; i < gimple_num_ops (stmt); i++) + get_expr_operands (stmt, gimple_op_ptr (stmt, i), opf_use); - default: - /* Notice that if get_expr_operands tries to use &STMT as the - operand pointer (which may only happen for USE operands), we - will fail in add_stmt_operand. This default will handle - statements like empty statements, or CALL_EXPRs that may - appear on the RHS of a statement or as statements themselves. */ - get_expr_operands (stmt, &stmt, opf_use); - break; + /* Add call-clobbered operands, if needed. */ + if (code == GIMPLE_CALL) + maybe_add_call_clobbered_vops (stmt); } } @@ -2320,14 +2106,16 @@ parse_ssa_operands (tree stmt) /* Create an operands cache for STMT. */ static void -build_ssa_operands (tree stmt) +build_ssa_operands (gimple stmt) { - stmt_ann_t ann = get_stmt_ann (stmt); - /* Initially assume that the statement has no volatile operands and makes no memory references. */ - ann->has_volatile_ops = false; - ann->references_memory = false; + gimple_set_has_volatile_ops (stmt, false); + gimple_set_references_memory (stmt, false); + + /* Just clear the bitmap so we don't end up reallocating it over and over. */ + if (gimple_addresses_taken (stmt)) + bitmap_clear (gimple_addresses_taken (stmt)); start_ssa_stmt_operands (); parse_ssa_operands (stmt); @@ -2337,46 +2125,87 @@ build_ssa_operands (tree stmt) /* For added safety, assume that statements with volatile operands also reference memory. */ - if (ann->has_volatile_ops) - ann->references_memory = true; + if (gimple_has_volatile_ops (stmt)) + gimple_set_references_memory (stmt, true); } -/* Free any operands vectors in OPS. */ +/* Releases the operands of STMT back to their freelists, and clears + the stmt operand lists. */ -void -free_ssa_operands (stmt_operands_p ops) +void +free_stmt_operands (gimple stmt) { - ops->def_ops = NULL; - ops->use_ops = NULL; - ops->vdef_ops = NULL; - ops->vuse_ops = NULL; - BITMAP_FREE (ops->loads); - BITMAP_FREE (ops->stores); + def_optype_p defs = gimple_def_ops (stmt), last_def; + use_optype_p uses = gimple_use_ops (stmt), last_use; + voptype_p vuses = gimple_vuse_ops (stmt); + voptype_p vdefs = gimple_vdef_ops (stmt), vdef, next_vdef; + unsigned i; + + if (defs) + { + for (last_def = defs; last_def->next; last_def = last_def->next) + continue; + last_def->next = gimple_ssa_operands (cfun)->free_defs; + gimple_ssa_operands (cfun)->free_defs = defs; + gimple_set_def_ops (stmt, NULL); + } + + if (uses) + { + for (last_use = uses; last_use->next; last_use = last_use->next) + delink_imm_use (USE_OP_PTR (last_use)); + delink_imm_use (USE_OP_PTR (last_use)); + last_use->next = gimple_ssa_operands (cfun)->free_uses; + gimple_ssa_operands (cfun)->free_uses = uses; + gimple_set_use_ops (stmt, NULL); + } + + if (vuses) + { + for (i = 0; i < VUSE_NUM (vuses); i++) + delink_imm_use (VUSE_OP_PTR (vuses, i)); + add_vop_to_freelist (vuses); + gimple_set_vuse_ops (stmt, NULL); + } + + if (vdefs) + { + for (vdef = vdefs; vdef; vdef = next_vdef) + { + next_vdef = vdef->next; + delink_imm_use (VDEF_OP_PTR (vdef, 0)); + add_vop_to_freelist (vdef); + } + gimple_set_vdef_ops (stmt, NULL); + } + + if (gimple_has_ops (stmt)) + gimple_set_addresses_taken (stmt, NULL); + + if (gimple_has_mem_ops (stmt)) + { + gimple_set_stored_syms (stmt, NULL, &operands_bitmap_obstack); + gimple_set_loaded_syms (stmt, NULL, &operands_bitmap_obstack); + } } /* Get the operands of statement STMT. 
*/ void -update_stmt_operands (tree stmt) +update_stmt_operands (gimple stmt) { - stmt_ann_t ann = get_stmt_ann (stmt); - /* If update_stmt_operands is called before SSA is initialized, do nothing. */ if (!ssa_operands_active ()) return; - /* The optimizers cannot handle statements that are nothing but a - _DECL. This indicates a bug in the gimplifier. */ - gcc_assert (!SSA_VAR_P (stmt)); - timevar_push (TV_TREE_OPS); - gcc_assert (ann->modified); + gcc_assert (gimple_modified_p (stmt)); build_ssa_operands (stmt); - ann->modified = 0; + gimple_set_modified (stmt, false); timevar_pop (TV_TREE_OPS); } @@ -2385,50 +2214,45 @@ update_stmt_operands (tree stmt) /* Copies virtual operands from SRC to DST. */ void -copy_virtual_operands (tree dest, tree src) +copy_virtual_operands (gimple dest, gimple src) { - int i, n; + unsigned int i, n; voptype_p src_vuses, dest_vuses; voptype_p src_vdefs, dest_vdefs; struct voptype_d vuse; struct voptype_d vdef; - stmt_ann_t dest_ann; - - VDEF_OPS (dest) = NULL; - VUSE_OPS (dest) = NULL; - dest_ann = get_stmt_ann (dest); - BITMAP_FREE (dest_ann->operands.loads); - BITMAP_FREE (dest_ann->operands.stores); + if (!gimple_has_mem_ops (src)) + return; - if (LOADED_SYMS (src)) - { - dest_ann->operands.loads = BITMAP_ALLOC (NULL); - bitmap_copy (dest_ann->operands.loads, LOADED_SYMS (src)); - } + gimple_set_vdef_ops (dest, NULL); + gimple_set_vuse_ops (dest, NULL); - if (STORED_SYMS (src)) - { - dest_ann->operands.stores = BITMAP_ALLOC (NULL); - bitmap_copy (dest_ann->operands.stores, STORED_SYMS (src)); - } + gimple_set_stored_syms (dest, gimple_stored_syms (src), + &operands_bitmap_obstack); + gimple_set_loaded_syms (dest, gimple_loaded_syms (src), + &operands_bitmap_obstack); /* Copy all the VUSE operators and corresponding operands. */ dest_vuses = &vuse; - for (src_vuses = VUSE_OPS (src); src_vuses; src_vuses = src_vuses->next) + for (src_vuses = gimple_vuse_ops (src); + src_vuses; + src_vuses = src_vuses->next) { n = VUSE_NUM (src_vuses); dest_vuses = add_vuse_op (dest, NULL_TREE, n, dest_vuses); for (i = 0; i < n; i++) SET_USE (VUSE_OP_PTR (dest_vuses, i), VUSE_OP (src_vuses, i)); - if (VUSE_OPS (dest) == NULL) - VUSE_OPS (dest) = vuse.next; + if (gimple_vuse_ops (dest) == NULL) + gimple_set_vuse_ops (dest, vuse.next); } /* Copy all the VDEF operators and corresponding operands. */ dest_vdefs = &vdef; - for (src_vdefs = VDEF_OPS (src); src_vdefs; src_vdefs = src_vdefs->next) + for (src_vdefs = gimple_vdef_ops (src); + src_vdefs; + src_vdefs = src_vdefs->next) { n = VUSE_NUM (src_vdefs); dest_vdefs = add_vdef_op (dest, NULL_TREE, n, dest_vdefs); @@ -2436,8 +2260,8 @@ copy_virtual_operands (tree dest, tree src) for (i = 0; i < n; i++) SET_USE (VUSE_OP_PTR (dest_vdefs, i), VUSE_OP (src_vdefs, i)); - if (VDEF_OPS (dest) == NULL) - VDEF_OPS (dest) = vdef.next; + if (gimple_vdef_ops (dest) == NULL) + gimple_set_vdef_ops (dest, vdef.next); } } @@ -2446,17 +2270,19 @@ copy_virtual_operands (tree dest, tree src) create an artificial stmt which looks like a load from the store, this can be used to eliminate redundant loads. OLD_OPS are the operands from the store stmt, and NEW_STMT is the new load which represents a load of the - values stored. */ + values stored. If DELINK_IMM_USES_P is specified, the immediate + uses of this stmt will be de-linked. 
*/ void -create_ssa_artificial_load_stmt (tree new_stmt, tree old_stmt) +create_ssa_artificial_load_stmt (gimple new_stmt, gimple old_stmt, + bool delink_imm_uses_p) { tree op; ssa_op_iter iter; use_operand_p use_p; unsigned i; - get_stmt_ann (new_stmt); + gimple_set_modified (new_stmt, false); /* Process NEW_STMT looking for operands. */ start_ssa_stmt_operands (); @@ -2466,7 +2292,7 @@ create_ssa_artificial_load_stmt (tree new_stmt, tree old_stmt) if (TREE_CODE (op) != SSA_NAME) var_ann (op)->in_vuse_list = false; - for (i = 0; VEC_iterate (tree, build_vuses, i, op); i++) + for (i = 0; VEC_iterate (tree, build_vdefs, i, op); i++) if (TREE_CODE (op) != SSA_NAME) var_ann (op)->in_vdef_list = false; @@ -2474,6 +2300,10 @@ create_ssa_artificial_load_stmt (tree new_stmt, tree old_stmt) VEC_truncate (tree, build_vdefs, 0); VEC_truncate (tree, build_vuses, 0); + /* Clear the loads and stores bitmaps. */ + bitmap_clear (build_loads); + bitmap_clear (build_stores); + /* For each VDEF on the original statement, we want to create a VUSE of the VDEF result operand on the new statement. */ FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter, SSA_OP_VDEF) @@ -2482,8 +2312,9 @@ create_ssa_artificial_load_stmt (tree new_stmt, tree old_stmt) finalize_ssa_stmt_operands (new_stmt); /* All uses in this fake stmt must not be in the immediate use lists. */ - FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES) - delink_imm_use (use_p); + if (delink_imm_uses_p) + FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES) + delink_imm_use (use_p); } @@ -2491,7 +2322,7 @@ create_ssa_artificial_load_stmt (tree new_stmt, tree old_stmt) to test the validity of the swap operation. */ void -swap_tree_operands (tree stmt, tree *exp0, tree *exp1) +swap_tree_operands (gimple stmt, tree *exp0, tree *exp1) { tree op0, op1; op0 = *exp0; @@ -2506,14 +2337,14 @@ swap_tree_operands (tree stmt, tree *exp0, tree *exp1) use0 = use1 = NULL; /* Find the 2 operands in the cache, if they are there. */ - for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next) + for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next) if (USE_OP_PTR (ptr)->use == exp0) { use0 = ptr; break; } - for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next) + for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next) if (USE_OP_PTR (ptr)->use == exp1) { use1 = ptr; @@ -2535,21 +2366,12 @@ swap_tree_operands (tree stmt, tree *exp0, tree *exp1) *exp1 = op0; } - -/* Add the base address of REF to the set *ADDRESSES_TAKEN. If - *ADDRESSES_TAKEN is NULL, a new set is created. REF may be - a single variable whose address has been taken or any other valid - GIMPLE memory reference (structure reference, array, etc). If the - base address of REF is a decl that has sub-variables, also add all - of its sub-variables. */ +/* Add the base address of REF to SET. */ void -add_to_addressable_set (tree ref, bitmap *addresses_taken) +add_to_addressable_set (tree ref, bitmap *set) { tree var; - subvar_t svars; - - gcc_assert (addresses_taken); /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF as the only thing we take the address of. 
If VAR is a structure, @@ -2559,28 +2381,29 @@ add_to_addressable_set (tree ref, bitmap *addresses_taken) var = get_base_address (ref); if (var && SSA_VAR_P (var)) { - if (*addresses_taken == NULL) - *addresses_taken = BITMAP_GGC_ALLOC (); - - if (var_can_have_subvars (var) - && (svars = get_subvars_for_var (var))) - { - subvar_t sv; - for (sv = svars; sv; sv = sv->next) - { - bitmap_set_bit (*addresses_taken, DECL_UID (sv->var)); - TREE_ADDRESSABLE (sv->var) = 1; - } - } - else - { - bitmap_set_bit (*addresses_taken, DECL_UID (var)); - TREE_ADDRESSABLE (var) = 1; - } + if (*set == NULL) + *set = BITMAP_ALLOC (&operands_bitmap_obstack); + + bitmap_set_bit (*set, DECL_UID (var)); + TREE_ADDRESSABLE (var) = 1; } } +/* Add the base address of REF to the set of addresses taken by STMT. + REF may be a single variable whose address has been taken or any + other valid GIMPLE memory reference (structure reference, array, + etc). If the base address of REF is a decl that has sub-variables, + also add all of its sub-variables. */ + +void +gimple_add_to_addresses_taken (gimple stmt, tree ref) +{ + gcc_assert (gimple_has_ops (stmt)); + add_to_addressable_set (ref, gimple_addresses_taken_ptr (stmt)); +} + + /* Scan the immediate_use list for VAR making sure its linked properly. Return TRUE if there is a problem and emit an error message to F. */ @@ -2640,10 +2463,10 @@ verify_imm_links (FILE *f, tree var) return false; error: - if (ptr->stmt && stmt_modified_p (ptr->stmt)) + if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt)) { - fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt); - print_generic_stmt (f, ptr->stmt, TDF_SLIM); + fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt); + print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM); } fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr, (void *)ptr->use); @@ -2675,13 +2498,13 @@ dump_immediate_uses_for (FILE *file, tree var) FOR_EACH_IMM_USE_FAST (use_p, iter, var) { - if (use_p->stmt == NULL && use_p->use == NULL) + if (use_p->loc.stmt == NULL && use_p->use == NULL) fprintf (file, "***end of stmt iterator marker***\n"); else if (!is_gimple_reg (USE_FROM_PTR (use_p))) - print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS|TDF_MEMSYMS); + print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS); else - print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM); + print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM); } fprintf(file, "\n"); } @@ -2732,23 +2555,23 @@ debug_immediate_uses_for (tree var) needed to keep the SSA form up to date. */ void -push_stmt_changes (tree *stmt_p) +push_stmt_changes (gimple *stmt_p) { - tree stmt; + gimple stmt; scb_t buf; - + stmt = *stmt_p; /* It makes no sense to keep track of PHI nodes. */ - if (TREE_CODE (stmt) == PHI_NODE) + if (gimple_code (stmt) == GIMPLE_PHI) return; - buf = xmalloc (sizeof *buf); + buf = XNEW (struct scb_d); memset (buf, 0, sizeof *buf); buf->stmt_p = stmt_p; - if (stmt_references_memory_p (stmt)) + if (gimple_references_memory_p (stmt)) { tree op; ssa_op_iter i; @@ -2790,15 +2613,9 @@ mark_difference_for_renaming (bitmap s1, bitmap s2) else if (!bitmap_equal_p (s1, s2)) { bitmap t1 = BITMAP_ALLOC (NULL); - bitmap t2 = BITMAP_ALLOC (NULL); - - bitmap_and_compl (t1, s1, s2); - bitmap_and_compl (t2, s2, s1); - bitmap_ior_into (t1, t2); + bitmap_xor (t1, s1, s2); mark_set_for_renaming (t1); - BITMAP_FREE (t1); - BITMAP_FREE (t2); } } @@ -2808,9 +2625,10 @@ mark_difference_for_renaming (bitmap s1, bitmap s2) the statement. 
*/ void -pop_stmt_changes (tree *stmt_p) +pop_stmt_changes (gimple *stmt_p) { - tree op, stmt; + tree op; + gimple stmt; ssa_op_iter iter; bitmap loads, stores; scb_t buf; @@ -2818,7 +2636,7 @@ pop_stmt_changes (tree *stmt_p) stmt = *stmt_p; /* It makes no sense to keep track of PHI nodes. */ - if (TREE_CODE (stmt) == PHI_NODE) + if (gimple_code (stmt) == GIMPLE_PHI) return; buf = VEC_pop (scb_t, scb_stack); @@ -2836,7 +2654,7 @@ pop_stmt_changes (tree *stmt_p) memory anymore, but we still need to act on the differences in the sets of symbols. */ loads = stores = NULL; - if (stmt_references_memory_p (stmt)) + if (gimple_references_memory_p (stmt)) { tree op; ssa_op_iter i; @@ -2899,14 +2717,14 @@ pop_stmt_changes (tree *stmt_p) statement. It avoids the expensive operand re-scan. */ void -discard_stmt_changes (tree *stmt_p) +discard_stmt_changes (gimple *stmt_p) { scb_t buf; - tree stmt; + gimple stmt; /* It makes no sense to keep track of PHI nodes. */ stmt = *stmt_p; - if (TREE_CODE (stmt) == PHI_NODE) + if (gimple_code (stmt) == GIMPLE_PHI) return; buf = VEC_pop (scb_t, scb_stack); @@ -2918,101 +2736,3 @@ discard_stmt_changes (tree *stmt_p) buf->stmt_p = NULL; free (buf); } - - -/* Returns true if statement STMT may access memory. */ - -bool -stmt_references_memory_p (tree stmt) -{ - if (!gimple_ssa_operands (cfun)->ops_active || TREE_CODE (stmt) == PHI_NODE) - return false; - - return stmt_ann (stmt)->references_memory; -} - - -/* Return the memory partition tag (MPT) associated with memory - symbol SYM. From a correctness standpoint, memory partitions can - be assigned in any arbitrary fashion as long as this rule is - observed: Given two memory partitions MPT.i and MPT.j, they must - not contain symbols in common. - - Memory partitions are used when putting the program into Memory-SSA - form. In particular, in Memory-SSA PHI nodes are not computed for - individual memory symbols. They are computed for memory - partitions. This reduces the amount of PHI nodes in the SSA graph - at the expense of precision (i.e., it makes unrelated stores affect - each other). - - However, it is possible to increase precision by changing this - partitioning scheme. For instance, if the partitioning scheme is - such that get_mpt_for is the identity function (that is, - get_mpt_for (s) = s), this will result in ultimate precision at the - expense of huge SSA webs. - - At the other extreme, a partitioning scheme that groups all the - symbols in the same set results in minimal SSA webs and almost - total loss of precision. */ - -tree -get_mpt_for (tree sym) -{ - tree mpt; - - /* Don't create a new tag unnecessarily. */ - mpt = memory_partition (sym); - if (mpt == NULL_TREE) - { - mpt = create_tag_raw (MEMORY_PARTITION_TAG, TREE_TYPE (sym), "MPT"); - TREE_ADDRESSABLE (mpt) = 0; - MTAG_GLOBAL (mpt) = 1; - add_referenced_var (mpt); - VEC_safe_push (tree, heap, gimple_ssa_operands (cfun)->mpt_table, mpt); - MPT_SYMBOLS (mpt) = BITMAP_ALLOC (NULL); - set_memory_partition (sym, mpt); - } - - return mpt; -} - - -/* Dump memory partition information to FILE. 
*/ - -void -dump_memory_partitions (FILE *file) -{ - unsigned i, npart; - unsigned long nsyms; - tree mpt; - - fprintf (file, "\nMemory partitions\n\n"); - for (i = 0, npart = 0, nsyms = 0; - VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, i, mpt); - i++) - { - if (mpt) - { - bitmap syms = MPT_SYMBOLS (mpt); - unsigned long n = bitmap_count_bits (syms); - - fprintf (file, "#%u: ", i); - print_generic_expr (file, mpt, 0); - fprintf (file, ": %lu elements: ", n); - dump_decl_set (file, syms); - npart++; - nsyms += n; - } - } - - fprintf (file, "\n%u memory partitions holding %lu symbols\n", npart, nsyms); -} - - -/* Dump memory partition information to stderr. */ - -void -debug_memory_partitions (void) -{ - dump_memory_partitions (stderr); -}
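
The change-tracking entry points converted in this last part of the patch (push_stmt_changes, pop_stmt_changes and discard_stmt_changes) are meant to bracket in-place modifications of a statement so that the operand cache and the sets of symbols to rename stay consistent. The fragment below is only a sketch of that calling protocol, not part of the patch itself; it assumes the usual tuples-branch iterator helpers (gsi_start_bb, gsi_end_p, gsi_next, gsi_stmt_ptr) and a fold_stmt that rewrites the statement in place.

/* Illustrative only: walk BB and try to fold each statement in
   place, keeping the SSA operand cache up to date.  The gsi_*
   helpers and fold_stmt are assumed to be provided elsewhere in the
   tuples branch.  */

static void
fold_stmts_in_block (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      /* Buffer the current operands and loaded/stored symbol sets
	 before touching the statement.  */
      push_stmt_changes (gsi_stmt_ptr (&gsi));

      if (fold_stmt (&gsi))
	/* The statement changed: re-scan its operands and mark any
	   symbols whose load/store sets differ for renaming.  */
	pop_stmt_changes (gsi_stmt_ptr (&gsi));
      else
	/* Nothing changed: drop the buffered state and avoid the
	   operand re-scan.  */
	discard_stmt_changes (gsi_stmt_ptr (&gsi));
    }
}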