X-Git-Url: http://git.sourceforge.jp/view?a=blobdiff_plain;f=gcc%2Ftree-ssa-operands.c;h=aa37b972628c34c28e93060b9e16a1bc1641f09e;hb=6c5a7e57a8527d504955e2802ae230e01ce63622;hp=4fa876de39c9e6d3e399f5d31a4a33618f100631;hpb=91fbe4482a14568d787a93800127a69c84b607b6;p=pf3gnuchains%2Fgcc-fork.git

diff --git a/gcc/tree-ssa-operands.c b/gcc/tree-ssa-operands.c
index 4fa876de39c..aa37b972628 100644
--- a/gcc/tree-ssa-operands.c
+++ b/gcc/tree-ssa-operands.c
@@ -1,11 +1,12 @@
 /* SSA operands management for trees.
-   Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
+   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
+   Free Software Foundation, Inc.
 
 This file is part of GCC.
 
 GCC is free software; you can redistribute it and/or modify
 it under the terms of the GNU General Public License as published by
-the Free Software Foundation; either version 2, or (at your option)
+the Free Software Foundation; either version 3, or (at your option)
 any later version.
 
 GCC is distributed in the hope that it will be useful,
@@ -14,9 +15,8 @@ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 GNU General Public License for more details.
 
 You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING.  If not, write to
-the Free Software Foundation, 51 Franklin Street, Fifth Floor,
-Boston, MA 02110-1301, USA.  */
+along with GCC; see the file COPYING3.  If not see
+<http://www.gnu.org/licenses/>.  */
 
 #include "config.h"
 #include "system.h"
@@ -26,6 +26,8 @@ Boston, MA 02110-1301, USA.  */
 #include "flags.h"
 #include "function.h"
 #include "diagnostic.h"
+#include "tree-pretty-print.h"
+#include "gimple-pretty-print.h"
 #include "tree-flow.h"
 #include "tree-inline.h"
 #include "tree-pass.h"
@@ -35,58 +37,83 @@ Boston, MA 02110-1301, USA.  */
 #include "langhooks.h"
 #include "ipa-reference.h"
 
-/* This file contains the code required to manage the operands cache of the
-   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
-   annotation.  This cache contains operands that will be of interest to
-   optimizers and other passes wishing to manipulate the IL.
+/* This file contains the code required to manage the operands cache of the
+   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
+   annotation.  This cache contains operands that will be of interest to
+   optimizers and other passes wishing to manipulate the IL.
 
-   The operand type are broken up into REAL and VIRTUAL operands.  The real
-   operands are represented as pointers into the stmt's operand tree.  Thus
+   The operand type are broken up into REAL and VIRTUAL operands.  The real
+   operands are represented as pointers into the stmt's operand tree.  Thus
    any manipulation of the real operands will be reflected in the actual tree.
-   Virtual operands are represented solely in the cache, although the base
-   variable for the SSA_NAME may, or may not occur in the stmt's tree.
+   Virtual operands are represented solely in the cache, although the base
+   variable for the SSA_NAME may, or may not occur in the stmt's tree.
    Manipulation of the virtual operands will not be reflected in the stmt tree.
 
-   The routines in this file are concerned with creating this operand cache
+   The routines in this file are concerned with creating this operand cache
    from a stmt tree.
 
-   The operand tree is the parsed by the various get_* routines which look
-   through the stmt tree for the occurrence of operands which may be of
-   interest, and calls are made to the append_* routines whenever one is
-   found.
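   [Editorial sketch, not part of the patch.]  For orientation, a pass
   typically consumes the cache built here through the iterator interface
   declared in tree-ssa-operands.h; the helper below is hypothetical, uses
   the post-patch gimple types, and assumes this file's includes:

     static void
     dump_stmt_uses (FILE *file, gimple stmt)
     {
       tree use;
       ssa_op_iter iter;

       FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
         print_generic_expr (file, use, 0);
     }
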
There are 5 of these routines, each representing one of the - 5 types of operands. Defs, Uses, Virtual Uses, Virtual May Defs, and - Virtual Must Defs. + The operand tree is the parsed by the various get_* routines which look + through the stmt tree for the occurrence of operands which may be of + interest, and calls are made to the append_* routines whenever one is + found. There are 4 of these routines, each representing one of the + 4 types of operands. Defs, Uses, Virtual Uses, and Virtual May Defs. - The append_* routines check for duplication, and simply keep a list of + The append_* routines check for duplication, and simply keep a list of unique objects for each operand type in the build_* extendable vectors. - Once the stmt tree is completely parsed, the finalize_ssa_operands() - routine is called, which proceeds to perform the finalization routine - on each of the 5 operand vectors which have been built up. + Once the stmt tree is completely parsed, the finalize_ssa_operands() + routine is called, which proceeds to perform the finalization routine + on each of the 4 operand vectors which have been built up. - If the stmt had a previous operand cache, the finalization routines - attempt to match up the new operands with the old ones. If it's a perfect - match, the old vector is simply reused. If it isn't a perfect match, then - a new vector is created and the new operands are placed there. For - virtual operands, if the previous cache had SSA_NAME version of a - variable, and that same variable occurs in the same operands cache, then + If the stmt had a previous operand cache, the finalization routines + attempt to match up the new operands with the old ones. If it's a perfect + match, the old vector is simply reused. If it isn't a perfect match, then + a new vector is created and the new operands are placed there. For + virtual operands, if the previous cache had SSA_NAME version of a + variable, and that same variable occurs in the same operands cache, then the new cache vector will also get the same SSA_NAME. - i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand - vector for VUSE, then the new vector will also be modified such that - it contains 'a_5' rather than 'a'. */ + i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new + operand vector for VUSE, then the new vector will also be modified + such that it contains 'a_5' rather than 'a'. */ + +/* Structure storing statistics on how many call clobbers we have, and + how many where avoided. */ + +static struct +{ + /* Number of call-clobbered ops we attempt to add to calls in + add_call_clobbered_mem_symbols. */ + unsigned int clobbered_vars; + + /* Number of write-clobbers (VDEFs) avoided by using + not_written information. */ + unsigned int static_write_clobbers_avoided; + + /* Number of reads (VUSEs) avoided by using not_read information. */ + unsigned int static_read_clobbers_avoided; + + /* Number of write-clobbers avoided because the variable can't escape to + this call. */ + unsigned int unescapable_clobbers_avoided; + + /* Number of read-only uses we attempt to add to calls in + add_call_read_mem_symbols. */ + unsigned int readonly_clobbers; + + /* Number of read-only uses we avoid using not_read information. */ + unsigned int static_readonly_clobbers_avoided; +} clobber_stats; + /* Flags to describe operand properties in helpers. */ /* By default, operands are loaded. 
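   An editorial sketch of how these flags flow, assuming the gimple_op_ptr
   accessor from gimple.h (the real call sites in this file differ in
   detail): for an assignment  a = b + c  the parser effectively does

     get_expr_operands (stmt, gimple_op_ptr (stmt, 0), opf_def);
     get_expr_operands (stmt, gimple_op_ptr (stmt, 1), opf_use);
     get_expr_operands (stmt, gimple_op_ptr (stmt, 2), opf_use);

   that is, the LHS is parsed with opf_def and everything else defaults
   to a load.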
*/ -#define opf_none 0 +#define opf_use 0 -/* Operand is the target of an assignment expression or a +/* Operand is the target of an assignment expression or a call-clobbered variable. */ -#define opf_is_def (1 << 0) - -/* Operand is the target of an assignment expression. */ -#define opf_kill_def (1 << 1) +#define opf_def (1 << 0) /* No virtual operands should be created in the expression. This is used when traversing ADDR_EXPR nodes which have different semantics than @@ -94,12 +121,12 @@ Boston, MA 02110-1301, USA. */ need to consider are indices into arrays. For instance, &a.b[i] should generate a USE of 'i' but it should not generate a VUSE for 'a' nor a VUSE for 'b'. */ -#define opf_no_vops (1 << 2) +#define opf_no_vops (1 << 1) -/* Operand is a "non-specific" kill for call-clobbers and such. This - is used to distinguish "reset the world" events from explicit - MODIFY_EXPRs. */ -#define opf_non_specific (1 << 3) +/* Operand is an implicit reference. This is used to distinguish + explicit assignments in the form of MODIFY_EXPR from + clobbering sites like function calls or ASM_EXPRs. */ +#define opf_implicit (1 << 2) /* Array for building all the def operands. */ static VEC(tree,heap) *build_defs; @@ -107,47 +134,25 @@ static VEC(tree,heap) *build_defs; /* Array for building all the use operands. */ static VEC(tree,heap) *build_uses; -/* Array for building all the V_MAY_DEF operands. */ -static VEC(tree,heap) *build_v_may_defs; - -/* Array for building all the VUSE operands. */ -static VEC(tree,heap) *build_vuses; - -/* Array for building all the V_MUST_DEF operands. */ -static VEC(tree,heap) *build_v_must_defs; +/* The built VDEF operand. */ +static tree build_vdef; -/* These arrays are the cached operand vectors for call clobbered calls. */ -static bool ops_active = false; +/* The built VUSE operand. */ +static tree build_vuse; -static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL; -static unsigned operand_memory_index; +/* Bitmap obstack for our datastructures that needs to survive across + compilations of multiple functions. */ +static bitmap_obstack operands_bitmap_obstack; -static void get_expr_operands (tree, tree *, int); +static void get_expr_operands (gimple, tree *, int); -static def_optype_p free_defs = NULL; -static use_optype_p free_uses = NULL; -static vuse_optype_p free_vuses = NULL; -static maydef_optype_p free_maydefs = NULL; -static mustdef_optype_p free_mustdefs = NULL; - -/* Allocates operand OP of given TYPE from the appropriate free list, - or of the new value if the list is empty. */ - -#define ALLOC_OPTYPE(OP, TYPE) \ - do \ - { \ - TYPE##_optype_p ret = free_##TYPE##s; \ - if (ret) \ - free_##TYPE##s = ret->next; \ - else \ - ret = ssa_operand_alloc (sizeof (*ret)); \ - (OP) = ret; \ - } while (0) +/* Number of functions with initialized ssa_operands. */ +static int n_initialized = 0; /* Return the DECL_UID of the base variable of T. */ static inline unsigned -get_name_decl (tree t) +get_name_decl (const_tree t) { if (TREE_CODE (t) != SSA_NAME) return DECL_UID (t); @@ -156,110 +161,82 @@ get_name_decl (tree t) } -/* Comparison function for qsort used in operand_build_sort_virtual. */ - -static int -operand_build_cmp (const void *p, const void *q) -{ - tree e1 = *((const tree *)p); - tree e2 = *((const tree *)q); - unsigned int u1,u2; - - u1 = get_name_decl (e1); - u2 = get_name_decl (e2); - - /* We want to sort in ascending order. They can never be equal. */ -#ifdef ENABLE_CHECKING - gcc_assert (u1 != u2); -#endif - return (u1 > u2 ? 
1 : -1); -} - - -/* Sort the virtual operands in LIST from lowest DECL_UID to highest. */ - -static inline void -operand_build_sort_virtual (VEC(tree,heap) *list) -{ - int num = VEC_length (tree, list); - - if (num < 2) - return; - - if (num == 2) - { - if (get_name_decl (VEC_index (tree, list, 0)) - > get_name_decl (VEC_index (tree, list, 1))) - { - /* Swap elements if in the wrong order. */ - tree tmp = VEC_index (tree, list, 0); - VEC_replace (tree, list, 0, VEC_index (tree, list, 1)); - VEC_replace (tree, list, 1, tmp); - } - return; - } - - /* There are 3 or more elements, call qsort. */ - qsort (VEC_address (tree, list), - VEC_length (tree, list), - sizeof (tree), - operand_build_cmp); -} - - /* Return true if the SSA operands cache is active. */ bool ssa_operands_active (void) { - return ops_active; + /* This function may be invoked from contexts where CFUN is NULL + (IPA passes), return false for now. FIXME: operands may be + active in each individual function, maybe this function should + take CFUN as a parameter. */ + if (cfun == NULL) + return false; + + return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active; } -/* Structure storing statistics on how many call clobbers we have, and - how many where avoided. */ +/* Create the VOP variable, an artificial global variable to act as a + representative of all of the virtual operands FUD chain. */ -static struct +static void +create_vop_var (void) { - /* Number of call-clobbered ops we attempt to add to calls in - add_call_clobber_ops. */ - unsigned int clobbered_vars; - - /* Number of write-clobbers (V_MAY_DEFs) avoided by using - not_written information. */ - unsigned int static_write_clobbers_avoided; - - /* Number of reads (VUSEs) avoided by using not_read information. */ - unsigned int static_read_clobbers_avoided; - - /* Number of write-clobbers avoided because the variable can't escape to - this call. */ - unsigned int unescapable_clobbers_avoided; + tree global_var; + + gcc_assert (cfun->gimple_df->vop == NULL_TREE); + + global_var = build_decl (BUILTINS_LOCATION, VAR_DECL, + get_identifier (".MEM"), + void_type_node); + DECL_ARTIFICIAL (global_var) = 1; + TREE_READONLY (global_var) = 0; + DECL_EXTERNAL (global_var) = 1; + TREE_STATIC (global_var) = 1; + TREE_USED (global_var) = 1; + DECL_CONTEXT (global_var) = NULL_TREE; + TREE_THIS_VOLATILE (global_var) = 0; + TREE_ADDRESSABLE (global_var) = 0; + + create_var_ann (global_var); + add_referenced_var (global_var); + cfun->gimple_df->vop = global_var; +} - /* Number of read-only uses we attempt to add to calls in - add_call_read_ops. */ - unsigned int readonly_clobbers; +/* These are the sizes of the operand memory buffer in bytes which gets + allocated each time more operands space is required. The final value is + the amount that is allocated every time after that. + In 1k we can fit 25 use operands (or 63 def operands) on a host with + 8 byte pointers, that would be 10 statements each with 1 def and 2 + uses. */ - /* Number of read-only uses we avoid using not_read information. */ - unsigned int static_readonly_clobbers_avoided; -} clobber_stats; - +#define OP_SIZE_INIT 0 +#define OP_SIZE_1 (1024 - sizeof (void *)) +#define OP_SIZE_2 (1024 * 4 - sizeof (void *)) +#define OP_SIZE_3 (1024 * 16 - sizeof (void *)) /* Initialize the operand cache routines. 
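   A quick editorial check of the 25/63 figures quoted for OP_SIZE_1
   above, assuming an LP64 host and the operand structures from
   tree-ssa-operands.h (struct def_optype_d is a next pointer plus a
   tree *, 16 bytes; struct use_optype_d is a next pointer plus a
   four-pointer ssa_use_operand_t, 40 bytes):

     OP_SIZE_1 = 1024 - sizeof (void *) = 1016 bytes
     1016 / 40 = 25 use operands      1016 / 16 = 63 def operands

   so ten statements with one def and two uses each need
   10 * (16 + 2 * 40) = 960 bytes and indeed fit in the first chunk.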
*/ void init_ssa_operands (void) { - build_defs = VEC_alloc (tree, heap, 5); - build_uses = VEC_alloc (tree, heap, 10); - build_vuses = VEC_alloc (tree, heap, 25); - build_v_may_defs = VEC_alloc (tree, heap, 25); - build_v_must_defs = VEC_alloc (tree, heap, 25); - - gcc_assert (operand_memory == NULL); - operand_memory_index = SSA_OPERAND_MEMORY_SIZE; - ops_active = true; + if (!n_initialized++) + { + build_defs = VEC_alloc (tree, heap, 5); + build_uses = VEC_alloc (tree, heap, 10); + build_vuse = NULL_TREE; + build_vdef = NULL_TREE; + bitmap_obstack_initialize (&operands_bitmap_obstack); + } + + gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL); + gimple_ssa_operands (cfun)->operand_memory_index + = gimple_ssa_operands (cfun)->ssa_operand_mem_size; + gimple_ssa_operands (cfun)->ops_active = true; memset (&clobber_stats, 0, sizeof (clobber_stats)); + gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_INIT; + create_vop_var (); } @@ -269,683 +246,321 @@ void fini_ssa_operands (void) { struct ssa_operand_memory_d *ptr; - VEC_free (tree, heap, build_defs); - VEC_free (tree, heap, build_uses); - VEC_free (tree, heap, build_v_must_defs); - VEC_free (tree, heap, build_v_may_defs); - VEC_free (tree, heap, build_vuses); - free_defs = NULL; - free_uses = NULL; - free_vuses = NULL; - free_maydefs = NULL; - free_mustdefs = NULL; - while ((ptr = operand_memory) != NULL) + + if (!--n_initialized) + { + VEC_free (tree, heap, build_defs); + VEC_free (tree, heap, build_uses); + build_vdef = NULL_TREE; + build_vuse = NULL_TREE; + } + + gimple_ssa_operands (cfun)->free_defs = NULL; + gimple_ssa_operands (cfun)->free_uses = NULL; + + while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL) { - operand_memory = operand_memory->next; + gimple_ssa_operands (cfun)->operand_memory + = gimple_ssa_operands (cfun)->operand_memory->next; ggc_free (ptr); } - ops_active = false; - + gimple_ssa_operands (cfun)->ops_active = false; + + if (!n_initialized) + bitmap_obstack_release (&operands_bitmap_obstack); + + cfun->gimple_df->vop = NULL_TREE; + if (dump_file && (dump_flags & TDF_STATS)) { - fprintf (dump_file, "Original clobbered vars:%d\n", + fprintf (dump_file, "Original clobbered vars: %d\n", clobber_stats.clobbered_vars); - fprintf (dump_file, "Static write clobbers avoided:%d\n", + fprintf (dump_file, "Static write clobbers avoided: %d\n", clobber_stats.static_write_clobbers_avoided); - fprintf (dump_file, "Static read clobbers avoided:%d\n", + fprintf (dump_file, "Static read clobbers avoided: %d\n", clobber_stats.static_read_clobbers_avoided); - fprintf (dump_file, "Unescapable clobbers avoided:%d\n", + fprintf (dump_file, "Unescapable clobbers avoided: %d\n", clobber_stats.unescapable_clobbers_avoided); - fprintf (dump_file, "Original read-only clobbers:%d\n", + fprintf (dump_file, "Original read-only clobbers: %d\n", clobber_stats.readonly_clobbers); - fprintf (dump_file, "Static read-only clobbers avoided:%d\n", + fprintf (dump_file, "Static read-only clobbers avoided: %d\n", clobber_stats.static_readonly_clobbers_avoided); } } -/* Return memory for operands of SIZE chunks. */ - +/* Return memory for an operand of size SIZE. 
*/ + static inline void * ssa_operand_alloc (unsigned size) { char *ptr; - if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE) + + gcc_assert (size == sizeof (struct use_optype_d) + || size == sizeof (struct def_optype_d)); + + if (gimple_ssa_operands (cfun)->operand_memory_index + size + >= gimple_ssa_operands (cfun)->ssa_operand_mem_size) { struct ssa_operand_memory_d *ptr; - ptr = GGC_NEW (struct ssa_operand_memory_d); - ptr->next = operand_memory; - operand_memory = ptr; - operand_memory_index = 0; + + switch (gimple_ssa_operands (cfun)->ssa_operand_mem_size) + { + case OP_SIZE_INIT: + gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_1; + break; + case OP_SIZE_1: + gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_2; + break; + case OP_SIZE_2: + case OP_SIZE_3: + gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_3; + break; + default: + gcc_unreachable (); + } + + ptr = (struct ssa_operand_memory_d *) + ggc_alloc (sizeof (void *) + + gimple_ssa_operands (cfun)->ssa_operand_mem_size); + ptr->next = gimple_ssa_operands (cfun)->operand_memory; + gimple_ssa_operands (cfun)->operand_memory = ptr; + gimple_ssa_operands (cfun)->operand_memory_index = 0; } - ptr = &(operand_memory->mem[operand_memory_index]); - operand_memory_index += size; + + ptr = &(gimple_ssa_operands (cfun)->operand_memory + ->mem[gimple_ssa_operands (cfun)->operand_memory_index]); + gimple_ssa_operands (cfun)->operand_memory_index += size; return ptr; } +/* Allocate a DEF operand. */ -/* This routine makes sure that PTR is in an immediate use list, and makes - sure the stmt pointer is set to the current stmt. */ - -static inline void -set_virtual_use_link (use_operand_p ptr, tree stmt) +static inline struct def_optype_d * +alloc_def (void) { - /* fold_stmt may have changed the stmt pointers. */ - if (ptr->stmt != stmt) - ptr->stmt = stmt; - - /* If this use isn't in a list, add it to the correct list. */ - if (!ptr->prev) - link_imm_use (ptr, *(ptr->use)); + struct def_optype_d *ret; + if (gimple_ssa_operands (cfun)->free_defs) + { + ret = gimple_ssa_operands (cfun)->free_defs; + gimple_ssa_operands (cfun)->free_defs + = gimple_ssa_operands (cfun)->free_defs->next; + } + else + ret = (struct def_optype_d *) + ssa_operand_alloc (sizeof (struct def_optype_d)); + return ret; } -/* Appends ELT after TO, and moves the TO pointer to ELT. */ - -#define APPEND_OP_AFTER(ELT, TO) \ - do \ - { \ - (TO)->next = (ELT); \ - (TO) = (ELT); \ - } while (0) - -/* Appends head of list FROM after TO, and move both pointers - to their successors. */ - -#define MOVE_HEAD_AFTER(FROM, TO) \ - do \ - { \ - APPEND_OP_AFTER (FROM, TO); \ - (FROM) = (FROM)->next; \ - } while (0) - -/* Moves OP to appropriate freelist. OP is set to its successor. */ - -#define MOVE_HEAD_TO_FREELIST(OP, TYPE) \ - do \ - { \ - TYPE##_optype_p next = (OP)->next; \ - (OP)->next = free_##TYPE##s; \ - free_##TYPE##s = (OP); \ - (OP) = next; \ - } while (0) - -/* Initializes immediate use at USE_PTR to value VAL, and links it to the list - of immediate uses. STMT is the current statement. */ - -#define INITIALIZE_USE(USE_PTR, VAL, STMT) \ - do \ - { \ - (USE_PTR)->use = (VAL); \ - link_imm_use_stmt ((USE_PTR), *(VAL), (STMT)); \ - } while (0) - -/* Adds OP to the list of defs after LAST, and moves - LAST to the new element. 
*/ - -static inline void -add_def_op (tree *op, def_optype_p *last) -{ - def_optype_p new; - - ALLOC_OPTYPE (new, def); - DEF_OP_PTR (new) = op; - APPEND_OP_AFTER (new, *last); -} -/* Adds OP to the list of uses of statement STMT after LAST, and moves - LAST to the new element. */ +/* Allocate a USE operand. */ -static inline void -add_use_op (tree stmt, tree *op, use_optype_p *last) +static inline struct use_optype_d * +alloc_use (void) { - use_optype_p new; - - ALLOC_OPTYPE (new, use); - INITIALIZE_USE (USE_OP_PTR (new), op, stmt); - APPEND_OP_AFTER (new, *last); + struct use_optype_d *ret; + if (gimple_ssa_operands (cfun)->free_uses) + { + ret = gimple_ssa_operands (cfun)->free_uses; + gimple_ssa_operands (cfun)->free_uses + = gimple_ssa_operands (cfun)->free_uses->next; + } + else + ret = (struct use_optype_d *) + ssa_operand_alloc (sizeof (struct use_optype_d)); + return ret; } -/* Adds OP to the list of vuses of statement STMT after LAST, and moves - LAST to the new element. */ -static inline void -add_vuse_op (tree stmt, tree op, vuse_optype_p *last) +/* Adds OP to the list of defs after LAST. */ + +static inline def_optype_p +add_def_op (tree *op, def_optype_p last) { - vuse_optype_p new; + def_optype_p new_def; - ALLOC_OPTYPE (new, vuse); - VUSE_OP (new) = op; - INITIALIZE_USE (VUSE_OP_PTR (new), &VUSE_OP (new), stmt); - APPEND_OP_AFTER (new, *last); + new_def = alloc_def (); + DEF_OP_PTR (new_def) = op; + last->next = new_def; + new_def->next = NULL; + return new_def; } -/* Adds OP to the list of maydefs of statement STMT after LAST, and moves - LAST to the new element. */ -static inline void -add_maydef_op (tree stmt, tree op, maydef_optype_p *last) -{ - maydef_optype_p new; +/* Adds OP to the list of uses of statement STMT after LAST. */ - ALLOC_OPTYPE (new, maydef); - MAYDEF_RESULT (new) = op; - MAYDEF_OP (new) = op; - INITIALIZE_USE (MAYDEF_OP_PTR (new), &MAYDEF_OP (new), stmt); - APPEND_OP_AFTER (new, *last); +static inline use_optype_p +add_use_op (gimple stmt, tree *op, use_optype_p last) +{ + use_optype_p new_use; + + new_use = alloc_use (); + USE_OP_PTR (new_use)->use = op; + link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt); + last->next = new_use; + new_use->next = NULL; + return new_use; } -/* Adds OP to the list of mustdefs of statement STMT after LAST, and moves - LAST to the new element. */ -static inline void -add_mustdef_op (tree stmt, tree op, mustdef_optype_p *last) -{ - mustdef_optype_p new; - - ALLOC_OPTYPE (new, mustdef); - MUSTDEF_RESULT (new) = op; - MUSTDEF_KILL (new) = op; - INITIALIZE_USE (MUSTDEF_KILL_PTR (new), &MUSTDEF_KILL (new), stmt); - APPEND_OP_AFTER (new, *last); -} /* Takes elements from build_defs and turns them into def operands of STMT. - TODO -- Given that def operands list is not necessarily sorted, merging - the operands this way does not make much sense. - -- Make build_defs VEC of tree *. */ + TODO -- Make build_defs VEC of tree *. */ static inline void -finalize_ssa_def_ops (tree stmt) +finalize_ssa_defs (gimple stmt) { unsigned new_i; struct def_optype_d new_list; def_optype_p old_ops, last; - tree *old_base; + unsigned int num = VEC_length (tree, build_defs); + + /* There should only be a single real definition per assignment. */ + gcc_assert ((stmt && gimple_code (stmt) != GIMPLE_ASSIGN) || num <= 1); + + /* Pre-pend the vdef we may have built. 
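   For orientation (editorial sketch; the dump below is illustrative, not
   taken from this patch): once the .MEM variable created by
   create_vop_var is rewritten into SSA form, the virtual operand handled
   here shows up in GIMPLE dumps as

     # .MEM_4 = VDEF <.MEM_3>
     *p_2 = x_1;

     # VUSE <.MEM_4>
     y_5 = *p_2;

   and the code below splices such a VDEF in as the first element of the
   def vector.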
*/ + if (build_vdef != NULL_TREE) + { + tree oldvdef = gimple_vdef (stmt); + if (oldvdef + && TREE_CODE (oldvdef) == SSA_NAME) + oldvdef = SSA_NAME_VAR (oldvdef); + if (oldvdef != build_vdef) + gimple_set_vdef (stmt, build_vdef); + VEC_safe_insert (tree, heap, build_defs, 0, (tree)gimple_vdef_ptr (stmt)); + ++num; + } new_list.next = NULL; last = &new_list; - old_ops = DEF_OPS (stmt); + old_ops = gimple_def_ops (stmt); new_i = 0; - while (old_ops && new_i < VEC_length (tree, build_defs)) - { - tree *new_base = (tree *) VEC_index (tree, build_defs, new_i); - old_base = DEF_OP_PTR (old_ops); - if (old_base == new_base) - { - /* if variables are the same, reuse this node. */ - MOVE_HEAD_AFTER (old_ops, last); - new_i++; - } - else if (old_base < new_base) - { - /* if old is less than new, old goes to the free list. */ - MOVE_HEAD_TO_FREELIST (old_ops, def); - } - else + /* Clear and unlink a no longer necessary VDEF. */ + if (build_vdef == NULL_TREE + && gimple_vdef (stmt) != NULL_TREE) + { + if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME) { - /* This is a new operand. */ - add_def_op (new_base, &last); - new_i++; + unlink_stmt_vdef (stmt); + release_ssa_name (gimple_vdef (stmt)); } + gimple_set_vdef (stmt, NULL_TREE); } - /* If there is anything remaining in the build_defs list, simply emit it. */ - for ( ; new_i < VEC_length (tree, build_defs); new_i++) - add_def_op ((tree *) VEC_index (tree, build_defs, new_i), &last); + /* If we have a non-SSA_NAME VDEF, mark it for renaming. */ + if (gimple_vdef (stmt) + && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME) + mark_sym_for_renaming (gimple_vdef (stmt)); - last->next = NULL; + /* Check for the common case of 1 def that hasn't changed. */ + if (old_ops && old_ops->next == NULL && num == 1 + && (tree *) VEC_index (tree, build_defs, 0) == DEF_OP_PTR (old_ops)) + return; /* If there is anything in the old list, free it. */ if (old_ops) { - old_ops->next = free_defs; - free_defs = old_ops; + old_ops->next = gimple_ssa_operands (cfun)->free_defs; + gimple_ssa_operands (cfun)->free_defs = old_ops; } - /* Now set the stmt's operands. */ - DEF_OPS (stmt) = new_list.next; + /* If there is anything remaining in the build_defs list, simply emit it. */ + for ( ; new_i < num; new_i++) + last = add_def_op ((tree *) VEC_index (tree, build_defs, new_i), last); -#ifdef ENABLE_CHECKING - { - def_optype_p ptr; - unsigned x = 0; - for (ptr = DEF_OPS (stmt); ptr; ptr = ptr->next) - x++; - - gcc_assert (x == VEC_length (tree, build_defs)); - } -#endif + /* Now set the stmt's operands. */ + gimple_set_def_ops (stmt, new_list.next); } -/* This routine will create stmt operands for STMT from the def build list. */ - -static void -finalize_ssa_defs (tree stmt) -{ - unsigned int num = VEC_length (tree, build_defs); - - /* There should only be a single real definition per assignment. */ - gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1); - - /* If there is an old list, often the new list is identical, or close, so - find the elements at the beginning that are the same as the vector. */ - finalize_ssa_def_ops (stmt); - VEC_truncate (tree, build_defs, 0); -} /* Takes elements from build_uses and turns them into use operands of STMT. TODO -- Make build_uses VEC of tree *. 
*/ static inline void -finalize_ssa_use_ops (tree stmt) +finalize_ssa_uses (gimple stmt) { unsigned new_i; struct use_optype_d new_list; use_optype_p old_ops, ptr, last; - new_list.next = NULL; - last = &new_list; - - old_ops = USE_OPS (stmt); - - /* If there is anything in the old list, free it. */ - if (old_ops) + /* Pre-pend the VUSE we may have built. */ + if (build_vuse != NULL_TREE) { - for (ptr = old_ops; ptr; ptr = ptr->next) - delink_imm_use (USE_OP_PTR (ptr)); - old_ops->next = free_uses; - free_uses = old_ops; + tree oldvuse = gimple_vuse (stmt); + if (oldvuse + && TREE_CODE (oldvuse) == SSA_NAME) + oldvuse = SSA_NAME_VAR (oldvuse); + if (oldvuse != (build_vuse != NULL_TREE + ? build_vuse : build_vdef)) + gimple_set_vuse (stmt, NULL_TREE); + VEC_safe_insert (tree, heap, build_uses, 0, (tree)gimple_vuse_ptr (stmt)); } - /* Now create nodes for all the new nodes. */ - for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++) - add_use_op (stmt, (tree *) VEC_index (tree, build_uses, new_i), &last); - - last->next = NULL; - - /* Now set the stmt's operands. */ - USE_OPS (stmt) = new_list.next; - -#ifdef ENABLE_CHECKING - { - unsigned x = 0; - for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next) - x++; - - gcc_assert (x == VEC_length (tree, build_uses)); - } -#endif -} - -/* Return a new use operand vector for STMT, comparing to OLD_OPS_P. */ - -static void -finalize_ssa_uses (tree stmt) -{ -#ifdef ENABLE_CHECKING - { - unsigned x; - unsigned num = VEC_length (tree, build_uses); - - /* If the pointer to the operand is the statement itself, something is - wrong. It means that we are pointing to a local variable (the - initial call to update_stmt_operands does not pass a pointer to a - statement). */ - for (x = 0; x < num; x++) - gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt); - } -#endif - finalize_ssa_use_ops (stmt); - VEC_truncate (tree, build_uses, 0); -} - - -/* Takes elements from build_v_may_defs and turns them into maydef operands of - STMT. */ - -static inline void -finalize_ssa_v_may_def_ops (tree stmt) -{ - unsigned new_i; - struct maydef_optype_d new_list; - maydef_optype_p old_ops, ptr, last; - tree act; - unsigned old_base, new_base; - new_list.next = NULL; last = &new_list; - old_ops = MAYDEF_OPS (stmt); - - new_i = 0; - while (old_ops && new_i < VEC_length (tree, build_v_may_defs)) - { - act = VEC_index (tree, build_v_may_defs, new_i); - new_base = get_name_decl (act); - old_base = get_name_decl (MAYDEF_OP (old_ops)); - - if (old_base == new_base) - { - /* if variables are the same, reuse this node. */ - MOVE_HEAD_AFTER (old_ops, last); - set_virtual_use_link (MAYDEF_OP_PTR (last), stmt); - new_i++; - } - else if (old_base < new_base) - { - /* if old is less than new, old goes to the free list. */ - delink_imm_use (MAYDEF_OP_PTR (old_ops)); - MOVE_HEAD_TO_FREELIST (old_ops, maydef); - } - else - { - /* This is a new operand. */ - add_maydef_op (stmt, act, &last); - new_i++; - } - } - - /* If there is anything remaining in the build_v_may_defs list, simply emit it. */ - for ( ; new_i < VEC_length (tree, build_v_may_defs); new_i++) - add_maydef_op (stmt, VEC_index (tree, build_v_may_defs, new_i), &last); + old_ops = gimple_use_ops (stmt); - last->next = NULL; + /* Clear a no longer necessary VUSE. */ + if (build_vuse == NULL_TREE + && gimple_vuse (stmt) != NULL_TREE) + gimple_set_vuse (stmt, NULL_TREE); /* If there is anything in the old list, free it. 
*/ if (old_ops) { for (ptr = old_ops; ptr; ptr = ptr->next) - delink_imm_use (MAYDEF_OP_PTR (ptr)); - old_ops->next = free_maydefs; - free_maydefs = old_ops; + delink_imm_use (USE_OP_PTR (ptr)); + old_ops->next = gimple_ssa_operands (cfun)->free_uses; + gimple_ssa_operands (cfun)->free_uses = old_ops; } - /* Now set the stmt's operands. */ - MAYDEF_OPS (stmt) = new_list.next; - -#ifdef ENABLE_CHECKING - { - unsigned x = 0; - for (ptr = MAYDEF_OPS (stmt); ptr; ptr = ptr->next) - x++; - - gcc_assert (x == VEC_length (tree, build_v_may_defs)); - } -#endif -} - -static void -finalize_ssa_v_may_defs (tree stmt) -{ - finalize_ssa_v_may_def_ops (stmt); -} - - -/* Clear the in_list bits and empty the build array for V_MAY_DEFs. */ - -static inline void -cleanup_v_may_defs (void) -{ - unsigned x, num; - num = VEC_length (tree, build_v_may_defs); - - for (x = 0; x < num; x++) + /* If we added a VUSE, make sure to set the operand if it is not already + present and mark it for renaming. */ + if (build_vuse != NULL_TREE + && gimple_vuse (stmt) == NULL_TREE) { - tree t = VEC_index (tree, build_v_may_defs, x); - if (TREE_CODE (t) != SSA_NAME) - { - var_ann_t ann = var_ann (t); - ann->in_v_may_def_list = 0; - } + gimple_set_vuse (stmt, gimple_vop (cfun)); + mark_sym_for_renaming (gimple_vop (cfun)); } - VEC_truncate (tree, build_v_may_defs, 0); -} - -/* Takes elements from build_vuses and turns them into vuse operands of - STMT. */ - -static inline void -finalize_ssa_vuse_ops (tree stmt) -{ - unsigned new_i; - struct vuse_optype_d new_list; - vuse_optype_p old_ops, ptr, last; - tree act; - unsigned old_base, new_base; - - new_list.next = NULL; - last = &new_list; - - old_ops = VUSE_OPS (stmt); - - new_i = 0; - while (old_ops && new_i < VEC_length (tree, build_vuses)) - { - act = VEC_index (tree, build_vuses, new_i); - new_base = get_name_decl (act); - old_base = get_name_decl (VUSE_OP (old_ops)); - - if (old_base == new_base) - { - /* if variables are the same, reuse this node. */ - MOVE_HEAD_AFTER (old_ops, last); - set_virtual_use_link (VUSE_OP_PTR (last), stmt); - new_i++; - } - else if (old_base < new_base) - { - /* if old is less than new, old goes to the free list. */ - delink_imm_use (USE_OP_PTR (old_ops)); - MOVE_HEAD_TO_FREELIST (old_ops, vuse); - } - else - { - /* This is a new operand. */ - add_vuse_op (stmt, act, &last); - new_i++; - } - } - - /* If there is anything remaining in the build_vuses list, simply emit it. */ - for ( ; new_i < VEC_length (tree, build_vuses); new_i++) - add_vuse_op (stmt, VEC_index (tree, build_vuses, new_i), &last); - - last->next = NULL; - - /* If there is anything in the old list, free it. */ - if (old_ops) - { - for (ptr = old_ops; ptr; ptr = ptr->next) - delink_imm_use (VUSE_OP_PTR (ptr)); - old_ops->next = free_vuses; - free_vuses = old_ops; - } + /* Now create nodes for all the new nodes. */ + for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++) + last = add_use_op (stmt, + (tree *) VEC_index (tree, build_uses, new_i), + last); /* Now set the stmt's operands. */ - VUSE_OPS (stmt) = new_list.next; - -#ifdef ENABLE_CHECKING - { - unsigned x = 0; - for (ptr = VUSE_OPS (stmt); ptr; ptr = ptr->next) - x++; - - gcc_assert (x == VEC_length (tree, build_vuses)); - } -#endif + gimple_set_use_ops (stmt, new_list.next); } - -/* Return a new VUSE operand vector, comparing to OLD_OPS_P. */ - -static void -finalize_ssa_vuses (tree stmt) -{ - unsigned num, num_v_may_defs; - unsigned vuse_index; - - /* Remove superfluous VUSE operands. 
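   [Editorial aside.]  The use operands created by add_use_op above are
   also threaded onto each SSA name's immediate-use list via
   link_imm_use_stmt, and delink_imm_use above takes them off again.  A
   hedged sketch of the consumer side, assuming the FOR_EACH_IMM_USE_FAST
   iterator from tree-ssa-operands.h (the helper name is hypothetical;
   GCC's own num_imm_uses does the same job):

     static unsigned
     count_imm_uses_sketch (tree var)
     {
       use_operand_p use_p;
       imm_use_iterator iter;
       unsigned count = 0;

       FOR_EACH_IMM_USE_FAST (use_p, iter, var)
         count++;

       return count;
     }
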
If the statement already has a - V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is - not needed because V_MAY_DEFs imply a VUSE of the variable. For - instance, suppose that variable 'a' is aliased: - # VUSE - # a_3 = V_MAY_DEF - a = a + 1; - The VUSE is superfluous because it is implied by the - V_MAY_DEF operation. */ - num = VEC_length (tree, build_vuses); - num_v_may_defs = VEC_length (tree, build_v_may_defs); - - if (num > 0 && num_v_may_defs > 0) - { - for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); ) - { - tree vuse; - vuse = VEC_index (tree, build_vuses, vuse_index); - if (TREE_CODE (vuse) != SSA_NAME) - { - var_ann_t ann = var_ann (vuse); - ann->in_vuse_list = 0; - if (ann->in_v_may_def_list) - { - VEC_ordered_remove (tree, build_vuses, vuse_index); - continue; - } - } - vuse_index++; - } - } - else - { - /* Clear out the in_list bits. */ - for (vuse_index = 0; - vuse_index < VEC_length (tree, build_vuses); - vuse_index++) - { - tree t = VEC_index (tree, build_vuses, vuse_index); - if (TREE_CODE (t) != SSA_NAME) - { - var_ann_t ann = var_ann (t); - ann->in_vuse_list = 0; - } - } - } - - finalize_ssa_vuse_ops (stmt); - - /* The V_MAY_DEF build vector wasn't cleaned up because we needed it. */ - cleanup_v_may_defs (); - - /* Free the VUSEs build vector. */ - VEC_truncate (tree, build_vuses, 0); - -} - -/* Takes elements from build_v_must_defs and turns them into mustdef operands of - STMT. */ +/* Clear the in_list bits and empty the build array for VDEFs and + VUSEs. */ static inline void -finalize_ssa_v_must_def_ops (tree stmt) -{ - unsigned new_i; - struct mustdef_optype_d new_list; - mustdef_optype_p old_ops, ptr, last; - tree act; - unsigned old_base, new_base; - - new_list.next = NULL; - last = &new_list; - - old_ops = MUSTDEF_OPS (stmt); - - new_i = 0; - while (old_ops && new_i < VEC_length (tree, build_v_must_defs)) - { - act = VEC_index (tree, build_v_must_defs, new_i); - new_base = get_name_decl (act); - old_base = get_name_decl (MUSTDEF_KILL (old_ops)); - - if (old_base == new_base) - { - /* If variables are the same, reuse this node. */ - MOVE_HEAD_AFTER (old_ops, last); - set_virtual_use_link (MUSTDEF_KILL_PTR (last), stmt); - new_i++; - } - else if (old_base < new_base) - { - /* If old is less than new, old goes to the free list. */ - delink_imm_use (MUSTDEF_KILL_PTR (old_ops)); - MOVE_HEAD_TO_FREELIST (old_ops, mustdef); - } - else - { - /* This is a new operand. */ - add_mustdef_op (stmt, act, &last); - new_i++; - } - } - - /* If there is anything remaining in the build_v_must_defs list, simply emit it. */ - for ( ; new_i < VEC_length (tree, build_v_must_defs); new_i++) - add_mustdef_op (stmt, VEC_index (tree, build_v_must_defs, new_i), &last); - - last->next = NULL; - - /* If there is anything in the old list, free it. */ - if (old_ops) - { - for (ptr = old_ops; ptr; ptr = ptr->next) - delink_imm_use (MUSTDEF_KILL_PTR (ptr)); - old_ops->next = free_mustdefs; - free_mustdefs = old_ops; - } - - /* Now set the stmt's operands. */ - MUSTDEF_OPS (stmt) = new_list.next; - -#ifdef ENABLE_CHECKING - { - unsigned x = 0; - for (ptr = MUSTDEF_OPS (stmt); ptr; ptr = ptr->next) - x++; - - gcc_assert (x == VEC_length (tree, build_v_must_defs)); - } -#endif -} - -static void -finalize_ssa_v_must_defs (tree stmt) +cleanup_build_arrays (void) { - /* In the presence of subvars, there may be more than one V_MUST_DEF - per statement (one for each subvar). 
It is a bit expensive to - verify that all must-defs in a statement belong to subvars if - there is more than one must-def, so we don't do it. Suffice to - say, if you reach here without having subvars, and have num >1, - you have hit a bug. */ - finalize_ssa_v_must_def_ops (stmt); - VEC_truncate (tree, build_v_must_defs, 0); + build_vdef = NULL_TREE; + build_vuse = NULL_TREE; + VEC_truncate (tree, build_defs, 0); + VEC_truncate (tree, build_uses, 0); } /* Finalize all the build vectors, fill the new ones into INFO. */ - + static inline void -finalize_ssa_stmt_operands (tree stmt) +finalize_ssa_stmt_operands (gimple stmt) { finalize_ssa_defs (stmt); finalize_ssa_uses (stmt); - finalize_ssa_v_must_defs (stmt); - finalize_ssa_v_may_defs (stmt); - finalize_ssa_vuses (stmt); + cleanup_build_arrays (); } @@ -956,9 +571,8 @@ start_ssa_stmt_operands (void) { gcc_assert (VEC_length (tree, build_defs) == 0); gcc_assert (VEC_length (tree, build_uses) == 0); - gcc_assert (VEC_length (tree, build_vuses) == 0); - gcc_assert (VEC_length (tree, build_v_may_defs) == 0); - gcc_assert (VEC_length (tree, build_v_must_defs) == 0); + gcc_assert (build_vuse == NULL_TREE); + gcc_assert (build_vdef == NULL_TREE); } @@ -967,7 +581,7 @@ start_ssa_stmt_operands (void) static inline void append_def (tree *def_p) { - VEC_safe_push (tree, heap, build_defs, (tree)def_p); + VEC_safe_push (tree, heap, build_defs, (tree) def_p); } @@ -976,753 +590,205 @@ append_def (tree *def_p) static inline void append_use (tree *use_p) { - VEC_safe_push (tree, heap, build_uses, (tree)use_p); -} - - -/* Add a new virtual may def for variable VAR to the build array. */ - -static inline void -append_v_may_def (tree var) -{ - if (TREE_CODE (var) != SSA_NAME) - { - var_ann_t ann = get_var_ann (var); - - /* Don't allow duplicate entries. */ - if (ann->in_v_may_def_list) - return; - ann->in_v_may_def_list = 1; - } - - VEC_safe_push (tree, heap, build_v_may_defs, (tree)var); -} - - -/* Add VAR to the list of virtual uses. */ - -static inline void -append_vuse (tree var) -{ - /* Don't allow duplicate entries. */ - if (TREE_CODE (var) != SSA_NAME) - { - var_ann_t ann = get_var_ann (var); - - if (ann->in_vuse_list || ann->in_v_may_def_list) - return; - ann->in_vuse_list = 1; - } - - VEC_safe_push (tree, heap, build_vuses, (tree)var); + VEC_safe_push (tree, heap, build_uses, (tree) use_p); } -/* Add VAR to the list of virtual must definitions for INFO. */ +/* Add VAR to the set of variables that require a VDEF operator. */ static inline void -append_v_must_def (tree var) +append_vdef (tree var) { - unsigned i; - - /* Don't allow duplicate entries. */ - for (i = 0; i < VEC_length (tree, build_v_must_defs); i++) - if (var == VEC_index (tree, build_v_must_defs, i)) - return; - - VEC_safe_push (tree, heap, build_v_must_defs, (tree)var); -} - - -/* REF is a tree that contains the entire pointer dereference - expression, if available, or NULL otherwise. ALIAS is the variable - we are asking if REF can access. OFFSET and SIZE come from the - memory access expression that generated this virtual operand. */ - -static bool -access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset, - HOST_WIDE_INT size) -{ - bool offsetgtz = offset > 0; - unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset; - tree base = ref ? get_base_address (ref) : NULL; - - /* If ALIAS is .GLOBAL_VAR then the memory reference REF must be - using a call-clobbered memory tag. By definition, call-clobbered - memory tags can always touch .GLOBAL_VAR. 
*/ - if (alias == global_var) - return true; - - /* We cannot prune nonlocal aliases because they are not type - specific. */ - if (alias == nonlocal_all) - return true; - - /* If ALIAS is an SFT, it can't be touched if the offset - and size of the access is not overlapping with the SFT offset and - size. This is only true if we are accessing through a pointer - to a type that is the same as SFT_PARENT_VAR. Otherwise, we may - be accessing through a pointer to some substruct of the - structure, and if we try to prune there, we will have the wrong - offset, and get the wrong answer. - i.e., we can't prune without more work if we have something like - - struct gcc_target - { - struct asm_out - { - const char *byte_op; - struct asm_int_op - { - const char *hi; - } aligned_op; - } asm_out; - } targetm; - - foo = &targetm.asm_out.aligned_op; - return foo->hi; - - SFT.1, which represents hi, will have SFT_OFFSET=32 because in - terms of SFT_PARENT_VAR, that is where it is. - However, the access through the foo pointer will be at offset 0. */ - if (size != -1 - && TREE_CODE (alias) == STRUCT_FIELD_TAG - && base - && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias)) - && !overlap_subvar (offset, size, alias, NULL)) - { -#ifdef ACCESS_DEBUGGING - fprintf (stderr, "Access to "); - print_generic_expr (stderr, ref, 0); - fprintf (stderr, " may not touch "); - print_generic_expr (stderr, alias, 0); - fprintf (stderr, " in function %s\n", get_name (current_function_decl)); -#endif - return false; - } - - /* Without strict aliasing, it is impossible for a component access - through a pointer to touch a random variable, unless that - variable *is* a structure or a pointer. - - That is, given p->c, and some random global variable b, - there is no legal way that p->c could be an access to b. - - Without strict aliasing on, we consider it legal to do something - like: - - struct foos { int l; }; - int foo; - static struct foos *getfoo(void); - int main (void) - { - struct foos *f = getfoo(); - f->l = 1; - foo = 2; - if (f->l == 1) - abort(); - exit(0); - } - static struct foos *getfoo(void) - { return (struct foos *)&foo; } - - (taken from 20000623-1.c) - - The docs also say/imply that access through union pointers - is legal (but *not* if you take the address of the union member, - i.e. the inverse), such that you can do - - typedef union { - int d; - } U; - - int rv; - void breakme() - { - U *rv0; - U *pretmp = (U*)&rv; - rv0 = pretmp; - rv0->d = 42; - } - To implement this, we just punt on accesses through union - pointers entirely. - */ - else if (ref - && flag_strict_aliasing - && TREE_CODE (ref) != INDIRECT_REF - && !MTAG_P (alias) - && (TREE_CODE (base) != INDIRECT_REF - || TREE_CODE (TREE_TYPE (base)) != UNION_TYPE) - && !AGGREGATE_TYPE_P (TREE_TYPE (alias)) - && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE - && !POINTER_TYPE_P (TREE_TYPE (alias)) - /* When the struct has may_alias attached to it, we need not to - return true. */ - && get_alias_set (base)) - { -#ifdef ACCESS_DEBUGGING - fprintf (stderr, "Access to "); - print_generic_expr (stderr, ref, 0); - fprintf (stderr, " may not touch "); - print_generic_expr (stderr, alias, 0); - fprintf (stderr, " in function %s\n", get_name (current_function_decl)); -#endif - return false; - } + if (!optimize) + return; - /* If the offset of the access is greater than the size of one of - the possible aliases, it can't be touching that alias, because it - would be past the end of the structure. 
*/ - else if (ref - && flag_strict_aliasing - && TREE_CODE (ref) != INDIRECT_REF - && !MTAG_P (alias) - && !POINTER_TYPE_P (TREE_TYPE (alias)) - && offsetgtz - && DECL_SIZE (alias) - && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST - && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias))) - { -#ifdef ACCESS_DEBUGGING - fprintf (stderr, "Access to "); - print_generic_expr (stderr, ref, 0); - fprintf (stderr, " may not touch "); - print_generic_expr (stderr, alias, 0); - fprintf (stderr, " in function %s\n", get_name (current_function_decl)); -#endif - return false; - } + gcc_assert ((build_vdef == NULL_TREE + || build_vdef == var) + && (build_vuse == NULL_TREE + || build_vuse == var)); - return true; + build_vdef = var; + build_vuse = var; } -/* Add VAR to the virtual operands array. FLAGS is as in - get_expr_operands. FULL_REF is a tree that contains the entire - pointer dereference expression, if available, or NULL otherwise. - OFFSET and SIZE come from the memory access expression that - generated this virtual operand. FOR_CLOBBER is true is this is - adding a virtual operand for a call clobber. */ +/* Add VAR to the set of variables that require a VUSE operator. */ -static void -add_virtual_operand (tree var, stmt_ann_t s_ann, int flags, - tree full_ref, HOST_WIDE_INT offset, - HOST_WIDE_INT size, bool for_clobber) +static inline void +append_vuse (tree var) { - VEC(tree,gc) *aliases; - tree sym; - var_ann_t v_ann; - - sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var); - v_ann = var_ann (sym); - - /* Mark statements with volatile operands. Optimizers should back - off from statements having volatile operands. */ - if (TREE_THIS_VOLATILE (sym) && s_ann) - s_ann->has_volatile_ops = true; - - /* If the variable cannot be modified and this is a V_MAY_DEF change - it into a VUSE. This happens when read-only variables are marked - call-clobbered and/or aliased to writable variables. So we only - check that this only happens on non-specific stores. - - Note that if this is a specific store, i.e. associated with a - modify_expr, then we can't suppress the V_MAY_DEF, lest we run - into validation problems. - - This can happen when programs cast away const, leaving us with a - store to read-only memory. If the statement is actually executed - at runtime, then the program is ill formed. If the statement is - not executed then all is well. At the very least, we cannot ICE. */ - if ((flags & opf_non_specific) && unmodifiable_var_p (var)) - flags &= ~(opf_is_def | opf_kill_def); - - /* The variable is not a GIMPLE register. Add it (or its aliases) to - virtual operands, unless the caller has specifically requested - not to add virtual operands (used when adding operands inside an + if (!optimize) + return; + + gcc_assert (build_vuse == NULL_TREE + || build_vuse == var); + + build_vuse = var; +} + +/* Add virtual operands for STMT. FLAGS is as in get_expr_operands. */ + +static void +add_virtual_operand (gimple stmt ATTRIBUTE_UNUSED, int flags) +{ + /* Add virtual operands to the stmt, unless the caller has specifically + requested not to do that (used when adding operands inside an ADDR_EXPR expression). */ if (flags & opf_no_vops) return; - - aliases = v_ann->may_aliases; - if (aliases == NULL) - { - /* The variable is not aliased or it is an alias tag. */ - if (flags & opf_is_def) - { - if (flags & opf_kill_def) - { - /* V_MUST_DEF for non-aliased, non-GIMPLE register - variable definitions. 
*/ - gcc_assert (!MTAG_P (var) - || TREE_CODE (var) == STRUCT_FIELD_TAG); - append_v_must_def (var); - } - else - { - /* Add a V_MAY_DEF for call-clobbered variables and - memory tags. */ - append_v_may_def (var); - } - } - else - append_vuse (var); - } + + gcc_assert (!is_gimple_debug (stmt)); + + if (flags & opf_def) + append_vdef (gimple_vop (cfun)); else - { - unsigned i; - tree al; - - /* The variable is aliased. Add its aliases to the virtual - operands. */ - gcc_assert (VEC_length (tree, aliases) != 0); - - if (flags & opf_is_def) - { - - bool none_added = true; - - for (i = 0; VEC_iterate (tree, aliases, i, al); i++) - { - if (!access_can_touch_variable (full_ref, al, offset, size)) - continue; - - none_added = false; - append_v_may_def (al); - } - - /* If the variable is also an alias tag, add a virtual - operand for it, otherwise we will miss representing - references to the members of the variable's alias set. - This fixes the bug in gcc.c-torture/execute/20020503-1.c. - - It is also necessary to add bare defs on clobbers for - SMT's, so that bare SMT uses caused by pruning all the - aliases will link up properly with calls. In order to - keep the number of these bare defs we add down to the - minimum necessary, we keep track of which SMT's were used - alone in statement vdefs or VUSEs. */ - if (v_ann->is_aliased - || none_added - || (TREE_CODE (var) == SYMBOL_MEMORY_TAG - && for_clobber - && SMT_USED_ALONE (var))) - { - /* Every bare SMT def we add should have SMT_USED_ALONE - set on it, or else we will get the wrong answer on - clobbers. */ - if (none_added - && !updating_used_alone && aliases_computed_p - && TREE_CODE (var) == SYMBOL_MEMORY_TAG) - gcc_assert (SMT_USED_ALONE (var)); - - append_v_may_def (var); - } - } - else - { - bool none_added = true; - for (i = 0; VEC_iterate (tree, aliases, i, al); i++) - { - if (!access_can_touch_variable (full_ref, al, offset, size)) - continue; - none_added = false; - append_vuse (al); - } - - /* Similarly, append a virtual uses for VAR itself, when - it is an alias tag. */ - if (v_ann->is_aliased || none_added) - append_vuse (var); - } - } + append_vuse (gimple_vop (cfun)); } -/* Add *VAR_P to the appropriate operand array for S_ANN. FLAGS is as in - get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to - the statement's real operands, otherwise it is added to virtual - operands. */ +/* Add *VAR_P to the appropriate operand array for statement STMT. + FLAGS is as in get_expr_operands. If *VAR_P is a GIMPLE register, + it will be added to the statement's real operands, otherwise it is + added to virtual operands. */ static void -add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags) +add_stmt_operand (tree *var_p, gimple stmt, int flags) { - bool is_real_op; tree var, sym; - var_ann_t v_ann; - - var = *var_p; - gcc_assert (SSA_VAR_P (var)); - is_real_op = is_gimple_reg (var); - - /* If this is a real operand, the operand is either an SSA name or a - decl. Virtual operands may only be decls. */ - gcc_assert (is_real_op || DECL_P (var)); + gcc_assert (SSA_VAR_P (*var_p)); + var = *var_p; sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var); - v_ann = var_ann (sym); - /* Mark statements with volatile operands. Optimizers should back - off from statements having volatile operands. */ - if (TREE_THIS_VOLATILE (sym) && s_ann) - s_ann->has_volatile_ops = true; + /* Mark statements with volatile operands. 
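   As an editorial example of the real/virtual split decided just below:
   for  x = g  with x a local scalar and g a global, is_gimple_reg (x)
   holds, so &x is appended to build_defs as a real operand, while g
   fails is_gimple_reg and is represented only by a VUSE of .MEM through
   add_virtual_operand.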
*/ + if (TREE_THIS_VOLATILE (sym)) + gimple_set_has_volatile_ops (stmt, true); - if (is_real_op) + if (is_gimple_reg (sym)) { /* The variable is a GIMPLE register. Add it to real operands. */ - if (flags & opf_is_def) + if (flags & opf_def) append_def (var_p); else append_use (var_p); } else - add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false); + add_virtual_operand (stmt, flags); +} + +/* Mark the base address of REF as having its address taken. + REF may be a single variable whose address has been taken or any + other valid GIMPLE memory reference (structure reference, array, + etc). */ + +static void +mark_address_taken (tree ref) +{ + tree var; + + /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF + as the only thing we take the address of. If VAR is a structure, + taking the address of a field means that the whole structure may + be referenced using pointer arithmetic. See PR 21407 and the + ensuing mailing list discussion. */ + var = get_base_address (ref); + if (var && DECL_P (var)) + TREE_ADDRESSABLE (var) = 1; } /* A subroutine of get_expr_operands to handle INDIRECT_REF, - ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF. + ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF. STMT is the statement being processed, EXPR is the INDIRECT_REF that got us here. - - FLAGS is as in get_expr_operands. - FULL_REF contains the full pointer dereference expression, if we - have it, or NULL otherwise. - - OFFSET and SIZE are the location of the access inside the - dereferenced pointer, if known. + FLAGS is as in get_expr_operands. RECURSE_ON_BASE should be set to true if we want to continue calling get_expr_operands on the base pointer, and false if something else will do it for us. */ static void -get_indirect_ref_operands (tree stmt, tree expr, int flags, - tree full_ref, - HOST_WIDE_INT offset, HOST_WIDE_INT size, +get_indirect_ref_operands (gimple stmt, tree expr, int flags, bool recurse_on_base) { tree *pptr = &TREE_OPERAND (expr, 0); - tree ptr = *pptr; - stmt_ann_t s_ann = stmt_ann (stmt); - /* Stores into INDIRECT_REF operands are never killing definitions. */ - flags &= ~opf_kill_def; - - if (SSA_VAR_P (ptr)) - { - struct ptr_info_def *pi = NULL; + if (TREE_THIS_VOLATILE (expr)) + gimple_set_has_volatile_ops (stmt, true); - /* If PTR has flow-sensitive points-to information, use it. */ - if (TREE_CODE (ptr) == SSA_NAME - && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL - && pi->name_mem_tag) - { - /* PTR has its own memory tag. Use it. */ - add_virtual_operand (pi->name_mem_tag, s_ann, flags, - full_ref, offset, size, false); - } - else - { - /* If PTR is not an SSA_NAME or it doesn't have a name - tag, use its symbol memory tag. */ - var_ann_t v_ann; - - /* If we are emitting debugging dumps, display a warning if - PTR is an SSA_NAME with no flow-sensitive alias - information. That means that we may need to compute - aliasing again. 
*/ - if (dump_file - && TREE_CODE (ptr) == SSA_NAME - && pi == NULL) - { - fprintf (dump_file, - "NOTE: no flow-sensitive alias info for "); - print_generic_expr (dump_file, ptr, dump_flags); - fprintf (dump_file, " in "); - print_generic_stmt (dump_file, stmt, dump_flags); - } - - if (TREE_CODE (ptr) == SSA_NAME) - ptr = SSA_NAME_VAR (ptr); - v_ann = var_ann (ptr); - - if (v_ann->symbol_mem_tag) - add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags, - full_ref, offset, size, false); - } - } - else if (TREE_CODE (ptr) == INTEGER_CST) - { - /* If a constant is used as a pointer, we can't generate a real - operand for it but we mark the statement volatile to prevent - optimizations from messing things up. */ - if (s_ann) - s_ann->has_volatile_ops = true; - return; - } - else - { - /* Ok, this isn't even is_gimple_min_invariant. Something's broke. */ - gcc_unreachable (); - } + /* Add the VOP. */ + add_virtual_operand (stmt, flags); /* If requested, add a USE operand for the base pointer. */ if (recurse_on_base) - get_expr_operands (stmt, pptr, opf_none); + get_expr_operands (stmt, pptr, + opf_use | (flags & opf_no_vops)); } /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */ static void -get_tmr_operands (tree stmt, tree expr, int flags) +get_tmr_operands (gimple stmt, tree expr, int flags) { - tree tag = TMR_TAG (expr), ref; - HOST_WIDE_INT offset, size, maxsize; - subvar_t svars, sv; - stmt_ann_t s_ann = stmt_ann (stmt); + if (TREE_THIS_VOLATILE (expr)) + gimple_set_has_volatile_ops (stmt, true); /* First record the real operands. */ - get_expr_operands (stmt, &TMR_BASE (expr), opf_none); - get_expr_operands (stmt, &TMR_INDEX (expr), opf_none); - - /* MEM_REFs should never be killing. */ - flags &= ~opf_kill_def; + get_expr_operands (stmt, &TMR_BASE (expr), opf_use | (flags & opf_no_vops)); + get_expr_operands (stmt, &TMR_INDEX (expr), opf_use | (flags & opf_no_vops)); if (TMR_SYMBOL (expr)) - { - stmt_ann_t ann = stmt_ann (stmt); - add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken); - } - - if (!tag) - { - /* Something weird, so ensure that we will be careful. */ - stmt_ann (stmt)->has_volatile_ops = true; - return; - } - - if (DECL_P (tag)) - { - get_expr_operands (stmt, &tag, flags); - return; - } - - ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize); - gcc_assert (ref != NULL_TREE); - svars = get_subvars_for_var (ref); - for (sv = svars; sv; sv = sv->next) - { - bool exact; - if (overlap_subvar (offset, maxsize, sv->var, &exact)) - { - int subvar_flags = flags; - if (!exact || size != maxsize) - subvar_flags &= ~opf_kill_def; - add_stmt_operand (&sv->var, s_ann, subvar_flags); - } - } -} - - -/* Add clobbering definitions for .GLOBAL_VAR or for each of the call - clobbered variables in the function. */ - -static void -add_call_clobber_ops (tree stmt, tree callee) -{ - unsigned u; - bitmap_iterator bi; - stmt_ann_t s_ann = stmt_ann (stmt); - bitmap not_read_b, not_written_b; - - /* Functions that are not const, pure or never return may clobber - call-clobbered variables. */ - if (s_ann) - s_ann->makes_clobbering_call = true; - - /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases - for the heuristic used to decide whether to create .GLOBAL_VAR or not. */ - if (global_var) - { - add_stmt_operand (&global_var, s_ann, opf_is_def); - return; - } - - /* Get info for local and module level statics. There is a bit - set for each static if the call being processed does not read - or write that variable. 
*/ - not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL; - not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL; - /* Add a V_MAY_DEF operand for every call clobbered variable. */ - EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi) - { - tree var = referenced_var_lookup (u); - unsigned int escape_mask = var_ann (var)->escape_mask; - tree real_var = var; - bool not_read; - bool not_written; - - /* Not read and not written are computed on regular vars, not - subvars, so look at the parent var if this is an SFT. */ - if (TREE_CODE (var) == STRUCT_FIELD_TAG) - real_var = SFT_PARENT_VAR (var); - - not_read = not_read_b ? bitmap_bit_p (not_read_b, - DECL_UID (real_var)) : false; - not_written = not_written_b ? bitmap_bit_p (not_written_b, - DECL_UID (real_var)) : false; - gcc_assert (!unmodifiable_var_p (var)); - - clobber_stats.clobbered_vars++; - - /* See if this variable is really clobbered by this function. */ - - /* Trivial case: Things escaping only to pure/const are not - clobbered by non-pure-const, and only read by pure/const. */ - if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0) - { - tree call = get_call_expr_in (stmt); - if (call_expr_flags (call) & (ECF_CONST | ECF_PURE)) - { - add_stmt_operand (&var, s_ann, opf_none); - clobber_stats.unescapable_clobbers_avoided++; - continue; - } - else - { - clobber_stats.unescapable_clobbers_avoided++; - continue; - } - } - - if (not_written) - { - clobber_stats.static_write_clobbers_avoided++; - if (!not_read) - add_stmt_operand (&var, s_ann, opf_none); - else - clobber_stats.static_read_clobbers_avoided++; - } - else - add_virtual_operand (var, s_ann, opf_is_def, NULL, 0, -1, true); - } -} - - -/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the - function. */ - -static void -add_call_read_ops (tree stmt, tree callee) -{ - unsigned u; - bitmap_iterator bi; - stmt_ann_t s_ann = stmt_ann (stmt); - bitmap not_read_b; - - /* if the function is not pure, it may reference memory. Add - a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var - for the heuristic used to decide whether to create .GLOBAL_VAR. */ - if (global_var) - { - add_stmt_operand (&global_var, s_ann, opf_none); - return; - } - - not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL; + mark_address_taken (TMR_SYMBOL (expr)); - /* Add a VUSE for each call-clobbered variable. */ - EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi) - { - tree var = referenced_var (u); - tree real_var = var; - bool not_read; - - clobber_stats.readonly_clobbers++; - - /* Not read and not written are computed on regular vars, not - subvars, so look at the parent var if this is an SFT. */ - - if (TREE_CODE (var) == STRUCT_FIELD_TAG) - real_var = SFT_PARENT_VAR (var); - - not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var)) - : false; - - if (not_read) - { - clobber_stats.static_readonly_clobbers_avoided++; - continue; - } - - add_stmt_operand (&var, s_ann, opf_none | opf_non_specific); - } + add_virtual_operand (stmt, flags); } -/* A subroutine of get_expr_operands to handle CALL_EXPR. */ +/* If STMT is a call that may clobber globals and other symbols that + escape, add them to the VDEF/VUSE lists for it. 
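
/* For intuition, an illustrative sketch (not part of the patch): how
   the ECF_* flags checked below map onto virtual operands at a call
   site.  */

extern int take_const (int) __attribute__ ((const));
extern int take_pure (int) __attribute__ ((pure));
extern int take_any (int);

int
caller (int i)
{
  int a = take_const (i); /* ECF_CONST: no VUSE and no VDEF.        */
  int b = take_pure (i);  /* ECF_PURE: a VUSE only; the callee may
                             read global memory but not write it.   */
  int c = take_any (i);   /* Neither: a VDEF, since the call may
                             clobber any memory that escapes.       */
  return a + b + c;
}
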
*/ static void -get_call_expr_operands (tree stmt, tree expr) +maybe_add_call_vops (gimple stmt) { - tree op; - int call_flags = call_expr_flags (expr); + int call_flags = gimple_call_flags (stmt); - /* If aliases have been computed already, add V_MAY_DEF or V_USE + /* If aliases have been computed already, add VDEF or VUSE operands for all the symbols that have been found to be - call-clobbered. - - Note that if aliases have not been computed, the global effects - of calls will not be included in the SSA web. This is fine - because no optimizer should run before aliases have been - computed. By not bothering with virtual operands for CALL_EXPRs - we avoid adding superfluous virtual operands, which can be a - significant compile time sink (See PR 15855). */ - if (aliases_computed_p - && !bitmap_empty_p (call_clobbered_vars) - && !(call_flags & ECF_NOVOPS)) + call-clobbered. */ + if (!(call_flags & ECF_NOVOPS)) { - /* A 'pure' or a 'const' function never call-clobbers anything. - A 'noreturn' function might, but since we don't return anyway - there is no point in recording that. */ - if (TREE_SIDE_EFFECTS (expr) - && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN))) - add_call_clobber_ops (stmt, get_callee_fndecl (expr)); + /* A 'pure' or a 'const' function never call-clobbers anything. + A 'noreturn' function might, but since we don't return anyway + there is no point in recording that. */ + if (!(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN))) + add_virtual_operand (stmt, opf_def); else if (!(call_flags & ECF_CONST)) - add_call_read_ops (stmt, get_callee_fndecl (expr)); + add_virtual_operand (stmt, opf_use); } - - /* Find uses in the called function. */ - get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none); - - for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op)) - get_expr_operands (stmt, &TREE_VALUE (op), opf_none); - - get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none); } /* Scan operands in the ASM_EXPR stmt referred to in INFO. */ static void -get_asm_expr_operands (tree stmt) +get_asm_expr_operands (gimple stmt) { - stmt_ann_t s_ann = stmt_ann (stmt); - int noutputs = list_length (ASM_OUTPUTS (stmt)); - const char **oconstraints - = (const char **) alloca ((noutputs) * sizeof (const char *)); - int i; - tree link; + size_t i, noutputs; + const char **oconstraints; const char *constraint; bool allows_mem, allows_reg, is_inout; - for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link)) + noutputs = gimple_asm_noutputs (stmt); + oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *)); + + /* Gather all output operands. */ + for (i = 0; i < gimple_asm_noutputs (stmt); i++) { + tree link = gimple_asm_output_op (stmt, i); constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link))); oconstraints[i] = constraint; parse_output_constraint (&constraint, i, 0, 0, &allows_mem, @@ -1734,97 +800,37 @@ get_asm_expr_operands (tree stmt) /* Memory operands are addressable. Note that STMT needs the address of this operand. */ if (!allows_reg && allows_mem) - { - tree t = get_base_address (TREE_VALUE (link)); - if (t && DECL_P (t) && s_ann) - add_to_addressable_set (t, &s_ann->addresses_taken); - } + mark_address_taken (TREE_VALUE (link)); - get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def); + get_expr_operands (stmt, &TREE_VALUE (link), opf_def); } - for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link)) + /* Gather all input operands. 
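
/* Aside, illustrative only: the user-visible consequence of the
   clobber scan at the end of this function.  An asm whose clobber
   list contains "memory", e.g.

       __asm__ __volatile__ ("" : : : "memory");

   receives a VDEF, so loads and stores cannot be moved across it,
   while an asm without that clobber constrains only its explicit
   operands.  */
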
*/ + for (i = 0; i < gimple_asm_ninputs (stmt); i++) { + tree link = gimple_asm_input_op (stmt, i); constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link))); - parse_input_constraint (&constraint, 0, 0, noutputs, 0, - oconstraints, &allows_mem, &allows_reg); + parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints, + &allows_mem, &allows_reg); /* Memory operands are addressable. Note that STMT needs the address of this operand. */ if (!allows_reg && allows_mem) - { - tree t = get_base_address (TREE_VALUE (link)); - if (t && DECL_P (t) && s_ann) - add_to_addressable_set (t, &s_ann->addresses_taken); - } + mark_address_taken (TREE_VALUE (link)); get_expr_operands (stmt, &TREE_VALUE (link), 0); } - - /* Clobber memory for asm ("" : : : "memory"); */ - for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link)) - if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0) - { - unsigned i; - bitmap_iterator bi; - - /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we - decided to group them). */ - if (global_var) - add_stmt_operand (&global_var, s_ann, opf_is_def); - else - EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi) - { - tree var = referenced_var (i); - add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific); - } - - /* Now clobber all addressables. */ - EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi) - { - tree var = referenced_var (i); - - /* Subvars are explicitly represented in this list, so - we don't need the original to be added to the clobber - ops, but the original *will* be in this list because - we keep the addressability of the original - variable up-to-date so we don't screw up the rest of - the backend. */ - if (var_can_have_subvars (var) - && get_subvars_for_var (var) != NULL) - continue; - - add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific); - } - - break; - } -} - - -/* Scan operands for the assignment expression EXPR in statement STMT. */ - -static void -get_modify_expr_operands (tree stmt, tree expr) -{ - /* First get operands from the RHS. */ - get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none); - - /* For the LHS, use a regular definition (OPF_IS_DEF) for GIMPLE - registers. If the LHS is a store to memory, we will either need - a preserving definition (V_MAY_DEF) or a killing definition - (V_MUST_DEF). - - Preserving definitions are those that modify a part of an - aggregate object for which no subvars have been computed (or the - reference does not correspond exactly to one of them). Stores - through a pointer are also represented with V_MAY_DEF operators. - - The determination of whether to use a preserving or a killing - definition is done while scanning the LHS of the assignment. By - default, assume that we will emit a V_MUST_DEF. */ - get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_is_def|opf_kill_def); + /* Clobber all memory and addressable symbols for asm ("" : : : "memory"); */ + for (i = 0; i < gimple_asm_nclobbers (stmt); i++) + { + tree link = gimple_asm_clobber_op (stmt, i); + if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0) + { + add_virtual_operand (stmt, opf_def); + break; + } + } } @@ -1833,18 +839,21 @@ get_modify_expr_operands (tree stmt, tree expr) interpret the operands found. 
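
/* Aside, illustrative only: what the ADDR_EXPR case below does at the
   user level.  Given

       int a[10];
       int *p = &a[3];

   the operand scanner marks A addressable via mark_address_taken, and
   since the address is invariant it needs no further operands for the
   statement.  */
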
*/ static void -get_expr_operands (tree stmt, tree *expr_p, int flags) +get_expr_operands (gimple stmt, tree *expr_p, int flags) { enum tree_code code; - enum tree_code_class class; + enum tree_code_class codeclass; tree expr = *expr_p; - stmt_ann_t s_ann = stmt_ann (stmt); + int uflags = opf_use; if (expr == NULL) return; + if (is_gimple_debug (stmt)) + uflags |= (flags & opf_no_vops); + code = TREE_CODE (expr); - class = TREE_CODE_CLASS (code); + codeclass = TREE_CODE_CLASS (code); switch (code) { @@ -1853,7 +862,8 @@ get_expr_operands (tree stmt, tree *expr_p, int flags) reference to it, but the fact that the statement takes its address will be of interest to some passes (e.g. alias resolution). */ - add_to_addressable_set (TREE_OPERAND (expr, 0), &s_ann->addresses_taken); + if (!is_gimple_debug (stmt)) + mark_address_taken (TREE_OPERAND (expr, 0)); /* If the address is invariant, there may be no interesting variable references inside. */ @@ -1870,33 +880,18 @@ get_expr_operands (tree stmt, tree *expr_p, int flags) return; case SSA_NAME: - case STRUCT_FIELD_TAG: - case SYMBOL_MEMORY_TAG: - case NAME_MEMORY_TAG: - add_stmt_operand (expr_p, s_ann, flags); + add_stmt_operand (expr_p, stmt, flags); return; case VAR_DECL: case PARM_DECL: case RESULT_DECL: - { - subvar_t svars; - - /* Add the subvars for a variable, if it has subvars, to DEFS - or USES. Otherwise, add the variable itself. Whether it - goes to USES or DEFS depends on the operand flags. */ - if (var_can_have_subvars (expr) - && (svars = get_subvars_for_var (expr))) - { - subvar_t sv; - for (sv = svars; sv; sv = sv->next) - add_stmt_operand (&sv->var, s_ann, flags); - } - else - add_stmt_operand (expr_p, s_ann, flags); + add_stmt_operand (expr_p, stmt, flags); + return; - return; - } + case DEBUG_EXPR_DECL: + gcc_assert (gimple_debug_bind_p (stmt)); + return; case MISALIGNED_INDIRECT_REF: get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags); @@ -1904,7 +899,7 @@ get_expr_operands (tree stmt, tree *expr_p, int flags) case ALIGN_INDIRECT_REF: case INDIRECT_REF: - get_indirect_ref_operands (stmt, expr, flags, NULL_TREE, 0, -1, true); + get_indirect_ref_operands (stmt, expr, flags, true); return; case TARGET_MEM_REF: @@ -1917,63 +912,22 @@ get_expr_operands (tree stmt, tree *expr_p, int flags) case REALPART_EXPR: case IMAGPART_EXPR: { - tree ref; - HOST_WIDE_INT offset, size, maxsize; - bool none = true; - - /* This component reference becomes an access to all of the - subvariables it can touch, if we can determine that, but - *NOT* the real one. If we can't determine which fields we - could touch, the recursion will eventually get to a - variable and add *all* of its subvars, or whatever is the - minimum correct subset. 
*/ - ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize); - if (SSA_VAR_P (ref) && get_subvars_for_var (ref)) - { - subvar_t sv; - subvar_t svars = get_subvars_for_var (ref); - - for (sv = svars; sv; sv = sv->next) - { - bool exact; - - if (overlap_subvar (offset, maxsize, sv->var, &exact)) - { - int subvar_flags = flags; - none = false; - if (!exact || size != maxsize) - subvar_flags &= ~opf_kill_def; - add_stmt_operand (&sv->var, s_ann, subvar_flags); - } - } - - if (!none) - flags |= opf_no_vops; - } - else if (TREE_CODE (ref) == INDIRECT_REF) - { - get_indirect_ref_operands (stmt, ref, flags, expr, offset, - maxsize, false); - flags |= opf_no_vops; - } + if (TREE_THIS_VOLATILE (expr)) + gimple_set_has_volatile_ops (stmt, true); + + get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); - /* Even if we found subvars above we need to ensure to see - immediate uses for d in s.a[d]. In case of s.a having - a subvar or we would miss it otherwise. */ - get_expr_operands (stmt, &TREE_OPERAND (expr, 0), - flags & ~opf_kill_def); - if (code == COMPONENT_REF) { - if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1))) - s_ann->has_volatile_ops = true; - get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none); + if (TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1))) + gimple_set_has_volatile_ops (stmt, true); + get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags); } else if (code == ARRAY_REF || code == ARRAY_RANGE_REF) { - get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none); - get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none); - get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none); + get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags); + get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags); + get_expr_operands (stmt, &TREE_OPERAND (expr, 3), uflags); } return; @@ -1982,23 +936,15 @@ get_expr_operands (tree stmt, tree *expr_p, int flags) case WITH_SIZE_EXPR: /* WITH_SIZE_EXPR is a pass-through reference to its first argument, and an rvalue reference to its second argument. */ - get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none); + get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags); get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); return; - case CALL_EXPR: - get_call_expr_operands (stmt, expr); - return; - case COND_EXPR: case VEC_COND_EXPR: - get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none); - get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none); - get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none); - return; - - case MODIFY_EXPR: - get_modify_expr_operands (stmt, expr); + get_expr_operands (stmt, &TREE_OPERAND (expr, 0), uflags); + get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags); + get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags); return; case CONSTRUCTOR: @@ -2011,16 +957,15 @@ get_expr_operands (tree stmt, tree *expr_p, int flags) for (idx = 0; VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce); idx++) - get_expr_operands (stmt, &ce->value, opf_none); + get_expr_operands (stmt, &ce->value, uflags); return; } case BIT_FIELD_REF: - /* Stores using BIT_FIELD_REF are always preserving definitions. 
*/ - flags &= ~opf_kill_def; - - /* Fallthru */ + if (TREE_THIS_VOLATILE (expr)) + gimple_set_has_volatile_ops (stmt, true); + /* FALLTHRU */ case TRUTH_NOT_EXPR: case VIEW_CONVERT_EXPR: @@ -2050,30 +995,19 @@ get_expr_operands (tree stmt, tree *expr_p, int flags) return; } - case BLOCK: case FUNCTION_DECL: - case EXC_PTR_EXPR: - case FILTER_EXPR: case LABEL_DECL: case CONST_DECL: - case OMP_PARALLEL: - case OMP_SECTIONS: - case OMP_FOR: - case OMP_SINGLE: - case OMP_MASTER: - case OMP_ORDERED: - case OMP_CRITICAL: - case OMP_RETURN: - case OMP_CONTINUE: + case CASE_LABEL_EXPR: /* Expressions that make no memory references. */ return; default: - if (class == tcc_unary) + if (codeclass == tcc_unary) goto do_unary; - if (class == tcc_binary || class == tcc_comparison) + if (codeclass == tcc_binary || codeclass == tcc_comparison) goto do_binary; - if (class == tcc_constant || class == tcc_type) + if (codeclass == tcc_constant || codeclass == tcc_type) return; } @@ -2091,59 +1025,35 @@ get_expr_operands (tree stmt, tree *expr_p, int flags) build_* operand vectors will have potential operands in them. */ static void -parse_ssa_operands (tree stmt) +parse_ssa_operands (gimple stmt) { - enum tree_code code; + enum gimple_code code = gimple_code (stmt); - code = TREE_CODE (stmt); - switch (code) + if (code == GIMPLE_ASM) + get_asm_expr_operands (stmt); + else if (is_gimple_debug (stmt)) { - case MODIFY_EXPR: - get_modify_expr_operands (stmt, stmt); - break; - - case COND_EXPR: - get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none); - break; - - case SWITCH_EXPR: - get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none); - break; - - case ASM_EXPR: - get_asm_expr_operands (stmt); - break; - - case RETURN_EXPR: - get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none); - break; - - case GOTO_EXPR: - get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none); - break; + if (gimple_debug_bind_p (stmt) + && gimple_debug_bind_has_value_p (stmt)) + get_expr_operands (stmt, gimple_debug_bind_get_value_ptr (stmt), + opf_use | opf_no_vops); + } + else + { + size_t i, start = 0; - case LABEL_EXPR: - get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none); - break; + if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL) + { + get_expr_operands (stmt, gimple_op_ptr (stmt, 0), opf_def); + start = 1; + } - case BIND_EXPR: - case CASE_LABEL_EXPR: - case TRY_CATCH_EXPR: - case TRY_FINALLY_EXPR: - case EH_FILTER_EXPR: - case CATCH_EXPR: - case RESX_EXPR: - /* These nodes contain no variable references. */ - break; + for (i = start; i < gimple_num_ops (stmt); i++) + get_expr_operands (stmt, gimple_op_ptr (stmt, i), opf_use); - default: - /* Notice that if get_expr_operands tries to use &STMT as the - operand pointer (which may only happen for USE operands), we - will fail in add_stmt_operand. This default will handle - statements like empty statements, or CALL_EXPRs that may - appear on the RHS of a statement or as statements themselves. */ - get_expr_operands (stmt, &stmt, opf_none); - break; + /* Add call-clobbered operands, if needed. */ + if (code == GIMPLE_CALL) + maybe_add_call_vops (stmt); } } @@ -2151,189 +1061,70 @@ parse_ssa_operands (tree stmt) /* Create an operands cache for STMT. */ static void -build_ssa_operands (tree stmt) +build_ssa_operands (gimple stmt) { - stmt_ann_t ann = get_stmt_ann (stmt); - /* Initially assume that the statement has no volatile operands. 
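
/* Usage note, a sketch rather than part of the patch: passes do not
   call build_ssa_operands directly.  They mutate a statement, leave
   it marked modified, and let update_stmt rebuild the caches:

       gimple_assign_set_rhs1 (stmt, new_op);  -- mutate in place
       update_stmt (stmt);                     -- rebuild the operands

   update_stmt_operands below is the worker that update_stmt ends up
   invoking for modified statements.  */
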
*/ - if (ann) - ann->has_volatile_ops = false; + gimple_set_has_volatile_ops (stmt, false); start_ssa_stmt_operands (); - parse_ssa_operands (stmt); - operand_build_sort_virtual (build_vuses); - operand_build_sort_virtual (build_v_may_defs); - operand_build_sort_virtual (build_v_must_defs); - finalize_ssa_stmt_operands (stmt); } -/* Free any operands vectors in OPS. */ - -void -free_ssa_operands (stmt_operands_p ops) -{ - ops->def_ops = NULL; - ops->use_ops = NULL; - ops->maydef_ops = NULL; - ops->mustdef_ops = NULL; - ops->vuse_ops = NULL; -} - - -/* Get the operands of statement STMT. */ - -void -update_stmt_operands (tree stmt) -{ - stmt_ann_t ann = get_stmt_ann (stmt); - - /* If update_stmt_operands is called before SSA is initialized, do - nothing. */ - if (!ssa_operands_active ()) - return; - - /* The optimizers cannot handle statements that are nothing but a - _DECL. This indicates a bug in the gimplifier. */ - gcc_assert (!SSA_VAR_P (stmt)); - - gcc_assert (ann->modified); - - timevar_push (TV_TREE_OPS); - - build_ssa_operands (stmt); - - /* Clear the modified bit for STMT. */ - ann->modified = 0; - - timevar_pop (TV_TREE_OPS); -} - - -/* Copies virtual operands from SRC to DST. */ +/* Releases the operands of STMT back to their freelists, and clears + the stmt operand lists. */ void -copy_virtual_operands (tree dest, tree src) +free_stmt_operands (gimple stmt) { - tree t; - ssa_op_iter iter, old_iter; - use_operand_p use_p, u2; - def_operand_p def_p, d2; - - build_ssa_operands (dest); - - /* Copy all the virtual fields. */ - FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE) - append_vuse (t); - FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF) - append_v_may_def (t); - FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF) - append_v_must_def (t); - - if (VEC_length (tree, build_vuses) == 0 - && VEC_length (tree, build_v_may_defs) == 0 - && VEC_length (tree, build_v_must_defs) == 0) - return; + def_optype_p defs = gimple_def_ops (stmt), last_def; + use_optype_p uses = gimple_use_ops (stmt), last_use; - /* Now commit the virtual operands to this stmt. */ - finalize_ssa_v_must_defs (dest); - finalize_ssa_v_may_defs (dest); - finalize_ssa_vuses (dest); - - /* Finally, set the field to the same values as then originals. 
*/ - t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE); - FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE) + if (defs) { - gcc_assert (!op_iter_done (&old_iter)); - SET_USE (use_p, t); - t = op_iter_next_tree (&old_iter); + for (last_def = defs; last_def->next; last_def = last_def->next) + continue; + last_def->next = gimple_ssa_operands (cfun)->free_defs; + gimple_ssa_operands (cfun)->free_defs = defs; + gimple_set_def_ops (stmt, NULL); } - gcc_assert (op_iter_done (&old_iter)); - op_iter_init_maydef (&old_iter, src, &u2, &d2); - FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter) + if (uses) { - gcc_assert (!op_iter_done (&old_iter)); - SET_USE (use_p, USE_FROM_PTR (u2)); - SET_DEF (def_p, DEF_FROM_PTR (d2)); - op_iter_next_maymustdef (&u2, &d2, &old_iter); + for (last_use = uses; last_use->next; last_use = last_use->next) + delink_imm_use (USE_OP_PTR (last_use)); + delink_imm_use (USE_OP_PTR (last_use)); + last_use->next = gimple_ssa_operands (cfun)->free_uses; + gimple_ssa_operands (cfun)->free_uses = uses; + gimple_set_use_ops (stmt, NULL); } - gcc_assert (op_iter_done (&old_iter)); - op_iter_init_mustdef (&old_iter, src, &u2, &d2); - FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter) + if (gimple_has_mem_ops (stmt)) { - gcc_assert (!op_iter_done (&old_iter)); - SET_USE (use_p, USE_FROM_PTR (u2)); - SET_DEF (def_p, DEF_FROM_PTR (d2)); - op_iter_next_maymustdef (&u2, &d2, &old_iter); + gimple_set_vuse (stmt, NULL_TREE); + gimple_set_vdef (stmt, NULL_TREE); } - gcc_assert (op_iter_done (&old_iter)); - } -/* Specifically for use in DOM's expression analysis. Given a store, we - create an artificial stmt which looks like a load from the store, this can - be used to eliminate redundant loads. OLD_OPS are the operands from the - store stmt, and NEW_STMT is the new load which represents a load of the - values stored. */ +/* Get the operands of statement STMT. */ void -create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt) +update_stmt_operands (gimple stmt) { - stmt_ann_t ann; - tree op; - ssa_op_iter iter; - use_operand_p use_p; - unsigned x; - - ann = get_stmt_ann (new_stmt); + /* If update_stmt_operands is called before SSA is initialized, do + nothing. */ + if (!ssa_operands_active ()) + return; - /* Process the stmt looking for operands. */ - start_ssa_stmt_operands (); - parse_ssa_operands (new_stmt); + timevar_push (TV_TREE_OPS); - for (x = 0; x < VEC_length (tree, build_vuses); x++) - { - tree t = VEC_index (tree, build_vuses, x); - if (TREE_CODE (t) != SSA_NAME) - { - var_ann_t ann = var_ann (t); - ann->in_vuse_list = 0; - } - } - - for (x = 0; x < VEC_length (tree, build_v_may_defs); x++) - { - tree t = VEC_index (tree, build_v_may_defs, x); - if (TREE_CODE (t) != SSA_NAME) - { - var_ann_t ann = var_ann (t); - ann->in_v_may_def_list = 0; - } - } + gcc_assert (gimple_modified_p (stmt)); + build_ssa_operands (stmt); + gimple_set_modified (stmt, false); - /* Remove any virtual operands that were found. */ - VEC_truncate (tree, build_v_may_defs, 0); - VEC_truncate (tree, build_v_must_defs, 0); - VEC_truncate (tree, build_vuses, 0); - - /* For each VDEF on the original statement, we want to create a - VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new - statement. */ - FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter, - (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF)) - append_vuse (op); - - /* Now build the operands for this new stmt. */ - finalize_ssa_stmt_operands (new_stmt); - - /* All uses in this fake stmt must not be in the immediate use lists. 
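
/* Aside, an illustrative sketch: the immediate-use lists these
   routines maintain are what let a pass rewrite every use of one SSA
   name safely while statements are relinked.  The hypothetical helper
   below shows the canonical iteration pattern:  */

static void
replace_uses_sketch (tree old_name, tree new_name)
{
  imm_use_iterator iter;
  use_operand_p use_p;
  gimple use_stmt;

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, old_name)
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
        SET_USE (use_p, new_name);
      update_stmt (use_stmt);
    }
}
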
*/ - FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES) - delink_imm_use (use_p); + timevar_pop (TV_TREE_OPS); } @@ -2341,7 +1132,7 @@ create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt) to test the validity of the swap operation. */ void -swap_tree_operands (tree stmt, tree *exp0, tree *exp1) +swap_tree_operands (gimple stmt, tree *exp0, tree *exp1) { tree op0, op1; op0 = *exp0; @@ -2356,14 +1147,14 @@ swap_tree_operands (tree stmt, tree *exp0, tree *exp1) use0 = use1 = NULL; /* Find the 2 operands in the cache, if they are there. */ - for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next) + for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next) if (USE_OP_PTR (ptr)->use == exp0) { use0 = ptr; break; } - for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next) + for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next) if (USE_OP_PTR (ptr)->use == exp1) { use1 = ptr; @@ -2386,51 +1177,6 @@ swap_tree_operands (tree stmt, tree *exp0, tree *exp1) } -/* Add the base address of REF to the set *ADDRESSES_TAKEN. If - *ADDRESSES_TAKEN is NULL, a new set is created. REF may be - a single variable whose address has been taken or any other valid - GIMPLE memory reference (structure reference, array, etc). If the - base address of REF is a decl that has sub-variables, also add all - of its sub-variables. */ - -void -add_to_addressable_set (tree ref, bitmap *addresses_taken) -{ - tree var; - subvar_t svars; - - gcc_assert (addresses_taken); - - /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF - as the only thing we take the address of. If VAR is a structure, - taking the address of a field means that the whole structure may - be referenced using pointer arithmetic. See PR 21407 and the - ensuing mailing list discussion. */ - var = get_base_address (ref); - if (var && SSA_VAR_P (var)) - { - if (*addresses_taken == NULL) - *addresses_taken = BITMAP_GGC_ALLOC (); - - if (var_can_have_subvars (var) - && (svars = get_subvars_for_var (var))) - { - subvar_t sv; - for (sv = svars; sv; sv = sv->next) - { - bitmap_set_bit (*addresses_taken, DECL_UID (sv->var)); - TREE_ADDRESSABLE (sv->var) = 1; - } - } - else - { - bitmap_set_bit (*addresses_taken, DECL_UID (var)); - TREE_ADDRESSABLE (var) = 1; - } - } -} - - /* Scan the immediate_use list for VAR making sure its linked properly. Return TRUE if there is a problem and emit an error message to F. */ @@ -2457,7 +1203,7 @@ verify_imm_links (FILE *f, tree var) { if (prev != ptr->prev) goto error; - + if (ptr->use == NULL) goto error; /* 2 roots, or SAFE guard node. */ else if (*(ptr->use) != var) @@ -2490,12 +1236,12 @@ verify_imm_links (FILE *f, tree var) return false; error: - if (ptr->stmt && stmt_modified_p (ptr->stmt)) + if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt)) { - fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt); - print_generic_stmt (f, ptr->stmt, TDF_SLIM); + fprintf (f, " STMT MODIFIED. 
- <%p> ", (void *)ptr->loc.stmt); + print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM); } - fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr, + fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr, (void *)ptr->use); print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM); fprintf(f, "\n"); @@ -2525,13 +1271,13 @@ dump_immediate_uses_for (FILE *file, tree var) FOR_EACH_IMM_USE_FAST (use_p, iter, var) { - if (use_p->stmt == NULL && use_p->use == NULL) + if (use_p->loc.stmt == NULL && use_p->use == NULL) fprintf (file, "***end of stmt iterator marker***\n"); else if (!is_gimple_reg (USE_FROM_PTR (use_p))) - print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS); + print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS); else - print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM); + print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM); } fprintf(file, "\n"); } @@ -2573,4 +1319,28 @@ debug_immediate_uses_for (tree var) dump_immediate_uses_for (stderr, var); } -#include "gt-tree-ssa-operands.h" + +/* Unlink STMTs virtual definition from the IL by propagating its use. */ + +void +unlink_stmt_vdef (gimple stmt) +{ + use_operand_p use_p; + imm_use_iterator iter; + gimple use_stmt; + tree vdef = gimple_vdef (stmt); + + if (!vdef + || TREE_CODE (vdef) != SSA_NAME) + return; + + FOR_EACH_IMM_USE_STMT (use_stmt, iter, gimple_vdef (stmt)) + { + FOR_EACH_IMM_USE_ON_STMT (use_p, iter) + SET_USE (use_p, gimple_vuse (stmt)); + } + + if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vdef (stmt))) + SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vuse (stmt)) = 1; +} +