X-Git-Url: http://git.sourceforge.jp/view?a=blobdiff_plain;f=gcc%2Ftree-ssa-operands.c;h=aa37b972628c34c28e93060b9e16a1bc1641f09e;hb=1e6c5f6367b032480c0975f8788388d467781a1b;hp=ccc2033dbc0daf880642267bd7ca7d7460f114aa;hpb=5d90eb152d69481a56df1d0c762aadd97d331aa9;p=pf3gnuchains%2Fgcc-fork.git

diff --git a/gcc/tree-ssa-operands.c b/gcc/tree-ssa-operands.c
index ccc2033dbc0..aa37b972628 100644
--- a/gcc/tree-ssa-operands.c
+++ b/gcc/tree-ssa-operands.c
@@ -1,11 +1,12 @@
 /* SSA operands management for trees.
-   Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
+   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
+   Free Software Foundation, Inc.
 
 This file is part of GCC.
 
 GCC is free software; you can redistribute it and/or modify
 it under the terms of the GNU General Public License as published by
-the Free Software Foundation; either version 2, or (at your option)
+the Free Software Foundation; either version 3, or (at your option)
 any later version.
 
 GCC is distributed in the hope that it will be useful,
@@ -14,9 +15,8 @@ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 GNU General Public License for more details.
 
 You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING.  If not, write to
-the Free Software Foundation, 59 Temple Place - Suite 330,
-Boston, MA 02111-1307, USA.  */
+along with GCC; see the file COPYING3.  If not see
+<http://www.gnu.org/licenses/>.  */
 
 #include "config.h"
 #include "system.h"
@@ -26,70 +26,94 @@ Boston, MA 02111-1307, USA.  */
 #include "flags.h"
 #include "function.h"
 #include "diagnostic.h"
-#include "errors.h"
+#include "tree-pretty-print.h"
+#include "gimple-pretty-print.h"
 #include "tree-flow.h"
 #include "tree-inline.h"
 #include "tree-pass.h"
 #include "ggc.h"
 #include "timevar.h"
-
+#include "toplev.h"
 #include "langhooks.h"
+#include "ipa-reference.h"
 
-/* This file contains the code required to manage the operands cache of the 
-   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt 
-   annotation.  This cache contains operands that will be of interest to 
-   optimizers and other passes wishing to manipulate the IL. 
+/* This file contains the code required to manage the operands cache of the
+   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
+   annotation.  This cache contains operands that will be of interest to
+   optimizers and other passes wishing to manipulate the IL.
 
-   The operand type are broken up into REAL and VIRTUAL operands.  The real 
-   operands are represented as pointers into the stmt's operand tree.  Thus 
+   The operand types are broken up into REAL and VIRTUAL operands.  The real
+   operands are represented as pointers into the stmt's operand tree.  Thus
    any manipulation of the real operands will be reflected in the actual tree.
-   Virtual operands are represented solely in the cache, although the base 
-   variable for the SSA_NAME may, or may not occur in the stmt's tree. 
+   Virtual operands are represented solely in the cache, although the base
+   variable for the SSA_NAME may or may not occur in the stmt's tree.
    Manipulation of the virtual operands will not be reflected in the stmt tree.
 
-   The routines in this file are concerned with creating this operand cache 
+   The routines in this file are concerned with creating this operand cache
    from a stmt tree.
 
-   The operand tree is the parsed by the various get_* routines which look 
-   through the stmt tree for the occurrence of operands which may be of 
-   interest, and calls are made to the append_* routines whenever one is 
-   found.  There are 5 of these routines, each representing one of the 
-   5 types of operands. Defs, Uses, Virtual Uses, Virtual May Defs, and 
-   Virtual Must Defs.
+   The operand tree is then parsed by the various get_* routines which look
+   through the stmt tree for the occurrence of operands which may be of
+   interest, and calls are made to the append_* routines whenever one is
+   found.  There are 4 of these routines, each representing one of the
+   4 types of operands.  Defs, Uses, Virtual Uses, and Virtual May Defs.
 
-   The append_* routines check for duplication, and simply keep a list of 
+   The append_* routines check for duplication, and simply keep a list of
    unique objects for each operand type in the build_* extendable vectors.
 
-   Once the stmt tree is completely parsed, the finalize_ssa_operands() 
-   routine is called, which proceeds to perform the finalization routine 
-   on each of the 5 operand vectors which have been built up.
+   Once the stmt tree is completely parsed, the finalize_ssa_operands()
+   routine is called, which proceeds to perform the finalization routine
+   on each of the 4 operand vectors which have been built up.
 
-   If the stmt had a previous operand cache, the finalization routines 
-   attempt to match up the new operands with the old ones.  If it's a perfect 
-   match, the old vector is simply reused.  If it isn't a perfect match, then 
-   a new vector is created and the new operands are placed there.  For 
-   virtual operands, if the previous cache had SSA_NAME version of a 
-   variable, and that same variable occurs in the same operands cache, then 
+   If the stmt had a previous operand cache, the finalization routines
+   attempt to match up the new operands with the old ones.  If it's a perfect
+   match, the old vector is simply reused.  If it isn't a perfect match, then
+   a new vector is created and the new operands are placed there.  For
+   virtual operands, if the previous cache had an SSA_NAME version of a
+   variable, and that same variable occurs in the same operands cache, then
    the new cache vector will also get the same SSA_NAME.
 
-   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand 
-   vector for VUSE, then the new vector will also be modified such that 
-   it contains 'a_5' rather than 'a'.
+   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
+   operand vector for VUSE, then the new vector will also be modified
+   such that it contains 'a_5' rather than 'a'.  */
+
+
+/* Structure storing statistics on how many call clobbers we have, and
+   how many were avoided.  */
+
+static struct
+{
+  /* Number of call-clobbered ops we attempt to add to calls in
+     add_call_clobbered_mem_symbols.  */
+  unsigned int clobbered_vars;
+
+  /* Number of write-clobbers (VDEFs) avoided by using
+     not_written information.  */
+  unsigned int static_write_clobbers_avoided;
 
-*/
+  /* Number of reads (VUSEs) avoided by using not_read information.  */
+  unsigned int static_read_clobbers_avoided;
+
+  /* Number of write-clobbers avoided because the variable can't escape to
+     this call.  */
+  unsigned int unescapable_clobbers_avoided;
+
+  /* Number of read-only uses we attempt to add to calls in
+     add_call_read_mem_symbols.  */
+  unsigned int readonly_clobbers;
+
+  /* Number of read-only uses we avoid using not_read information.  */
+  unsigned int static_readonly_clobbers_avoided;
+} clobber_stats;
 
 /* Flags to describe operand properties in helpers.  */
 
 /* By default, operands are loaded.
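   As a sketch of how these flags are threaded through the scanner
   (the gimple_assign_*_ptr accessors are assumed from gimple.h, not
   shown in this patch), a walk over a GIMPLE assignment is roughly
 
     get_expr_operands (stmt, gimple_assign_rhs1_ptr (stmt), opf_use);
     get_expr_operands (stmt, gimple_assign_lhs_ptr (stmt), opf_def);
 
   so loads default to opf_use and only store targets get opf_def.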
*/ -#define opf_none 0 - -/* Operand is the target of an assignment expression or a - call-clobbered variable */ -#define opf_is_def (1 << 0) +#define opf_use 0 -/* Operand is the target of an assignment expression. */ -#define opf_kill_def (1 << 1) +/* Operand is the target of an assignment expression or a + call-clobbered variable. */ +#define opf_def (1 << 0) /* No virtual operands should be created in the expression. This is used when traversing ADDR_EXPR nodes which have different semantics than @@ -97,682 +121,446 @@ Boston, MA 02111-1307, USA. */ need to consider are indices into arrays. For instance, &a.b[i] should generate a USE of 'i' but it should not generate a VUSE for 'a' nor a VUSE for 'b'. */ -#define opf_no_vops (1 << 2) +#define opf_no_vops (1 << 1) -/* This structure maintain a sorted list of operands which is created by - parse_ssa_operand. */ -struct opbuild_list_d GTY (()) -{ - varray_type vars; /* The VAR_DECLS tree. */ - varray_type uid; /* The sort value for virtual symbols. */ - varray_type next; /* The next index in the sorted list. */ - int first; /* First element in list. */ - unsigned num; /* Number of elements. */ -}; - -#define OPBUILD_LAST -1 - +/* Operand is an implicit reference. This is used to distinguish + explicit assignments in the form of MODIFY_EXPR from + clobbering sites like function calls or ASM_EXPRs. */ +#define opf_implicit (1 << 2) /* Array for building all the def operands. */ -static GTY (()) struct opbuild_list_d build_defs; +static VEC(tree,heap) *build_defs; /* Array for building all the use operands. */ -static GTY (()) struct opbuild_list_d build_uses; - -/* Array for building all the v_may_def operands. */ -static GTY (()) struct opbuild_list_d build_v_may_defs; - -/* Array for building all the vuse operands. */ -static GTY (()) struct opbuild_list_d build_vuses; - -/* Array for building all the v_must_def operands. */ -static GTY (()) struct opbuild_list_d build_v_must_defs; - -/* True if the operands for call clobbered vars are cached and valid. */ -bool ssa_call_clobbered_cache_valid; -bool ssa_ro_call_cache_valid; - -/* These arrays are the cached operand vectors for call clobbered calls. */ -static VEC(tree,heap) *clobbered_v_may_defs; -static VEC(tree,heap) *clobbered_vuses; -static VEC(tree,heap) *ro_call_vuses; -static bool clobbered_aliased_loads; -static bool clobbered_aliased_stores; -static bool ro_call_aliased_loads; -static bool ops_active = false; - -static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL; -static unsigned operand_memory_index; - -static void note_addressable (tree, stmt_ann_t); -static void get_expr_operands (tree, tree *, int); -static void get_asm_expr_operands (tree); -static void get_indirect_ref_operands (tree, tree, int); -static void get_call_expr_operands (tree, tree); -static inline void append_def (tree *); -static inline void append_use (tree *); -static void append_v_may_def (tree); -static void append_v_must_def (tree); -static void add_call_clobber_ops (tree); -static void add_call_read_ops (tree); -static void add_stmt_operand (tree *, stmt_ann_t, int); -static void build_ssa_operands (tree stmt); - -static def_optype_p free_defs = NULL; -static use_optype_p free_uses = NULL; -static vuse_optype_p free_vuses = NULL; -static maydef_optype_p free_maydefs = NULL; -static mustdef_optype_p free_mustdefs = NULL; - -/* Initialize a virtual operand build LIST called NAME with NUM elements. 
*/ +static VEC(tree,heap) *build_uses; -static inline void -opbuild_initialize_virtual (struct opbuild_list_d *list, int num, - const char *name) -{ - list->first = OPBUILD_LAST; - list->num = 0; - VARRAY_TREE_INIT (list->vars, num, name); - VARRAY_UINT_INIT (list->uid, num, "List UID"); - VARRAY_INT_INIT (list->next, num, "List NEXT"); -} +/* The built VDEF operand. */ +static tree build_vdef; +/* The built VUSE operand. */ +static tree build_vuse; -/* Initialize a real operand build LIST called NAME with NUM elements. */ +/* Bitmap obstack for our datastructures that needs to survive across + compilations of multiple functions. */ +static bitmap_obstack operands_bitmap_obstack; -static inline void -opbuild_initialize_real (struct opbuild_list_d *list, int num, const char *name) +static void get_expr_operands (gimple, tree *, int); + +/* Number of functions with initialized ssa_operands. */ +static int n_initialized = 0; + +/* Return the DECL_UID of the base variable of T. */ + +static inline unsigned +get_name_decl (const_tree t) { - list->first = OPBUILD_LAST; - list->num = 0; - VARRAY_TREE_PTR_INIT (list->vars, num, name); - VARRAY_INT_INIT (list->next, num, "List NEXT"); - /* The UID field is not needed since we sort based on the pointer value. */ - list->uid = NULL; + if (TREE_CODE (t) != SSA_NAME) + return DECL_UID (t); + else + return DECL_UID (SSA_NAME_VAR (t)); } -/* Free memory used in virtual operand build object LIST. */ +/* Return true if the SSA operands cache is active. */ -static inline void -opbuild_free (struct opbuild_list_d *list) +bool +ssa_operands_active (void) { - list->vars = NULL; - list->uid = NULL; - list->next = NULL; + /* This function may be invoked from contexts where CFUN is NULL + (IPA passes), return false for now. FIXME: operands may be + active in each individual function, maybe this function should + take CFUN as a parameter. */ + if (cfun == NULL) + return false; + + return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active; } -/* Number of elements in an opbuild list. */ +/* Create the VOP variable, an artificial global variable to act as a + representative of all of the virtual operands FUD chain. */ -static inline unsigned -opbuild_num_elems (struct opbuild_list_d *list) +static void +create_vop_var (void) { - return list->num; + tree global_var; + + gcc_assert (cfun->gimple_df->vop == NULL_TREE); + + global_var = build_decl (BUILTINS_LOCATION, VAR_DECL, + get_identifier (".MEM"), + void_type_node); + DECL_ARTIFICIAL (global_var) = 1; + TREE_READONLY (global_var) = 0; + DECL_EXTERNAL (global_var) = 1; + TREE_STATIC (global_var) = 1; + TREE_USED (global_var) = 1; + DECL_CONTEXT (global_var) = NULL_TREE; + TREE_THIS_VOLATILE (global_var) = 0; + TREE_ADDRESSABLE (global_var) = 0; + + create_var_ann (global_var); + add_referenced_var (global_var); + cfun->gimple_df->vop = global_var; } +/* These are the sizes of the operand memory buffer in bytes which gets + allocated each time more operands space is required. The final value is + the amount that is allocated every time after that. + In 1k we can fit 25 use operands (or 63 def operands) on a host with + 8 byte pointers, that would be 10 statements each with 1 def and 2 + uses. */ -/* Add VAR to the real operand list LIST, keeping it sorted and avoiding - duplicates. The actual sort value is the tree pointer value. 
*/ +#define OP_SIZE_INIT 0 +#define OP_SIZE_1 (1024 - sizeof (void *)) +#define OP_SIZE_2 (1024 * 4 - sizeof (void *)) +#define OP_SIZE_3 (1024 * 16 - sizeof (void *)) -static inline void -opbuild_append_real (struct opbuild_list_d *list, tree *var) -{ - int index; +/* Initialize the operand cache routines. */ -#ifdef ENABLE_CHECKING - /* Ensure the real operand doesn't exist already. */ - for (index = list->first; - index != OPBUILD_LAST; - index = VARRAY_INT (list->next, index)) - gcc_assert (VARRAY_TREE_PTR (list->vars, index) != var); -#endif +void +init_ssa_operands (void) +{ + if (!n_initialized++) + { + build_defs = VEC_alloc (tree, heap, 5); + build_uses = VEC_alloc (tree, heap, 10); + build_vuse = NULL_TREE; + build_vdef = NULL_TREE; + bitmap_obstack_initialize (&operands_bitmap_obstack); + } - /* First item in the list. */ - index = VARRAY_ACTIVE_SIZE (list->vars); - if (index == 0) - list->first = index; - else - VARRAY_INT (list->next, index - 1) = index; - VARRAY_PUSH_INT (list->next, OPBUILD_LAST); - VARRAY_PUSH_TREE_PTR (list->vars, var); - list->num++; + gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL); + gimple_ssa_operands (cfun)->operand_memory_index + = gimple_ssa_operands (cfun)->ssa_operand_mem_size; + gimple_ssa_operands (cfun)->ops_active = true; + memset (&clobber_stats, 0, sizeof (clobber_stats)); + gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_INIT; + create_vop_var (); } -/* Add VAR to the virtual operand list LIST, keeping it sorted and avoiding - duplicates. The actual sort value is the DECL UID of the base variable. */ +/* Dispose of anything required by the operand routines. */ -static inline void -opbuild_append_virtual (struct opbuild_list_d *list, tree var) +void +fini_ssa_operands (void) { - int index, curr, last; - unsigned int var_uid; - - if (TREE_CODE (var) != SSA_NAME) - var_uid = DECL_UID (var); - else - var_uid = DECL_UID (SSA_NAME_VAR (var)); - - index = VARRAY_ACTIVE_SIZE (list->vars); + struct ssa_operand_memory_d *ptr; - if (index == 0) + if (!--n_initialized) { - VARRAY_PUSH_TREE (list->vars, var); - VARRAY_PUSH_UINT (list->uid, var_uid); - VARRAY_PUSH_INT (list->next, OPBUILD_LAST); - list->first = 0; - list->num = 1; - return; + VEC_free (tree, heap, build_defs); + VEC_free (tree, heap, build_uses); + build_vdef = NULL_TREE; + build_vuse = NULL_TREE; } - last = OPBUILD_LAST; - /* Find the correct spot in the sorted list. */ - for (curr = list->first; - curr != OPBUILD_LAST; - last = curr, curr = VARRAY_INT (list->next, curr)) - { - if (VARRAY_UINT (list->uid, curr) > var_uid) - break; - } + gimple_ssa_operands (cfun)->free_defs = NULL; + gimple_ssa_operands (cfun)->free_uses = NULL; - if (last == OPBUILD_LAST) + while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL) { - /* First item in the list. */ - VARRAY_PUSH_INT (list->next, list->first); - list->first = index; - } - else - { - /* Don't enter duplicates at all. */ - if (VARRAY_UINT (list->uid, last) == var_uid) - return; - - VARRAY_PUSH_INT (list->next, VARRAY_INT (list->next, last)); - VARRAY_INT (list->next, last) = index; + gimple_ssa_operands (cfun)->operand_memory + = gimple_ssa_operands (cfun)->operand_memory->next; + ggc_free (ptr); } - VARRAY_PUSH_TREE (list->vars, var); - VARRAY_PUSH_UINT (list->uid, var_uid); - list->num++; -} - - -/* Return the first element index in LIST. OPBUILD_LAST means there are no - more elements. 
*/ -static inline int -opbuild_first (struct opbuild_list_d *list) -{ - if (list->num > 0) - return list->first; - else - return OPBUILD_LAST; -} + gimple_ssa_operands (cfun)->ops_active = false; + if (!n_initialized) + bitmap_obstack_release (&operands_bitmap_obstack); -/* Return the next element after PREV in LIST. */ + cfun->gimple_df->vop = NULL_TREE; -static inline int -opbuild_next (struct opbuild_list_d *list, int prev) -{ - return VARRAY_INT (list->next, prev); + if (dump_file && (dump_flags & TDF_STATS)) + { + fprintf (dump_file, "Original clobbered vars: %d\n", + clobber_stats.clobbered_vars); + fprintf (dump_file, "Static write clobbers avoided: %d\n", + clobber_stats.static_write_clobbers_avoided); + fprintf (dump_file, "Static read clobbers avoided: %d\n", + clobber_stats.static_read_clobbers_avoided); + fprintf (dump_file, "Unescapable clobbers avoided: %d\n", + clobber_stats.unescapable_clobbers_avoided); + fprintf (dump_file, "Original read-only clobbers: %d\n", + clobber_stats.readonly_clobbers); + fprintf (dump_file, "Static read-only clobbers avoided: %d\n", + clobber_stats.static_readonly_clobbers_avoided); + } } -/* Return the real element at index ELEM in LIST. */ +/* Return memory for an operand of size SIZE. */ -static inline tree * -opbuild_elem_real (struct opbuild_list_d *list, int elem) +static inline void * +ssa_operand_alloc (unsigned size) { - return VARRAY_TREE_PTR (list->vars, elem); -} + char *ptr; + gcc_assert (size == sizeof (struct use_optype_d) + || size == sizeof (struct def_optype_d)); -/* Return the virtual element at index ELEM in LIST. */ + if (gimple_ssa_operands (cfun)->operand_memory_index + size + >= gimple_ssa_operands (cfun)->ssa_operand_mem_size) + { + struct ssa_operand_memory_d *ptr; -static inline tree -opbuild_elem_virtual (struct opbuild_list_d *list, int elem) -{ - return VARRAY_TREE (list->vars, elem); -} + switch (gimple_ssa_operands (cfun)->ssa_operand_mem_size) + { + case OP_SIZE_INIT: + gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_1; + break; + case OP_SIZE_1: + gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_2; + break; + case OP_SIZE_2: + case OP_SIZE_3: + gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_3; + break; + default: + gcc_unreachable (); + } + ptr = (struct ssa_operand_memory_d *) + ggc_alloc (sizeof (void *) + + gimple_ssa_operands (cfun)->ssa_operand_mem_size); + ptr->next = gimple_ssa_operands (cfun)->operand_memory; + gimple_ssa_operands (cfun)->operand_memory = ptr; + gimple_ssa_operands (cfun)->operand_memory_index = 0; + } -/* Return the virtual element uid at index ELEM in LIST. */ -static inline unsigned int -opbuild_elem_uid (struct opbuild_list_d *list, int elem) -{ - return VARRAY_UINT (list->uid, elem); + ptr = &(gimple_ssa_operands (cfun)->operand_memory + ->mem[gimple_ssa_operands (cfun)->operand_memory_index]); + gimple_ssa_operands (cfun)->operand_memory_index += size; + return ptr; } -/* Reset an operand build list. */ +/* Allocate a DEF operand. 
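+   A sketch of the recycling discipline, mirroring what
+   finalize_ssa_defs does below: nodes freed from a statement are
+   pushed onto a per-function free list,
+
+     old_ops->next = gimple_ssa_operands (cfun)->free_defs;
+     gimple_ssa_operands (cfun)->free_defs = old_ops;
+
+   and alloc_def pops that list before carving new arena memory.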
*/ -static inline void -opbuild_clear (struct opbuild_list_d *list) +static inline struct def_optype_d * +alloc_def (void) { - list->first = OPBUILD_LAST; - VARRAY_POP_ALL (list->vars); - VARRAY_POP_ALL (list->next); - if (list->uid) - VARRAY_POP_ALL (list->uid); - list->num = 0; + struct def_optype_d *ret; + if (gimple_ssa_operands (cfun)->free_defs) + { + ret = gimple_ssa_operands (cfun)->free_defs; + gimple_ssa_operands (cfun)->free_defs + = gimple_ssa_operands (cfun)->free_defs->next; + } + else + ret = (struct def_optype_d *) + ssa_operand_alloc (sizeof (struct def_optype_d)); + return ret; } -/* Remove ELEM from LIST where PREV is the previous element. Return the next - element. */ +/* Allocate a USE operand. */ -static inline int -opbuild_remove_elem (struct opbuild_list_d *list, int elem, int prev) +static inline struct use_optype_d * +alloc_use (void) { - int ret; - if (prev != OPBUILD_LAST) + struct use_optype_d *ret; + if (gimple_ssa_operands (cfun)->free_uses) { - gcc_assert (VARRAY_INT (list->next, prev) == elem); - ret = VARRAY_INT (list->next, prev) = VARRAY_INT (list->next, elem); + ret = gimple_ssa_operands (cfun)->free_uses; + gimple_ssa_operands (cfun)->free_uses + = gimple_ssa_operands (cfun)->free_uses->next; } else - { - gcc_assert (list->first == elem); - ret = list->first = VARRAY_INT (list->next, elem); - } - list->num--; + ret = (struct use_optype_d *) + ssa_operand_alloc (sizeof (struct use_optype_d)); return ret; } -/* Return true if the ssa operands cache is active. */ +/* Adds OP to the list of defs after LAST. */ -bool -ssa_operands_active (void) +static inline def_optype_p +add_def_op (tree *op, def_optype_p last) { - return ops_active; + def_optype_p new_def; + + new_def = alloc_def (); + DEF_OP_PTR (new_def) = op; + last->next = new_def; + new_def->next = NULL; + return new_def; } -/* Initialize the operand cache routines. */ +/* Adds OP to the list of uses of statement STMT after LAST. */ -void -init_ssa_operands (void) +static inline use_optype_p +add_use_op (gimple stmt, tree *op, use_optype_p last) { - opbuild_initialize_real (&build_defs, 5, "build defs"); - opbuild_initialize_real (&build_uses, 10, "build uses"); - opbuild_initialize_virtual (&build_vuses, 25, "build_vuses"); - opbuild_initialize_virtual (&build_v_may_defs, 25, "build_v_may_defs"); - opbuild_initialize_virtual (&build_v_must_defs, 25, "build_v_must_defs"); - gcc_assert (operand_memory == NULL); - operand_memory_index = SSA_OPERAND_MEMORY_SIZE; - ops_active = true; + use_optype_p new_use; + + new_use = alloc_use (); + USE_OP_PTR (new_use)->use = op; + link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt); + last->next = new_use; + new_use->next = NULL; + return new_use; } -/* Dispose of anything required by the operand routines. */ -void -fini_ssa_operands (void) -{ - struct ssa_operand_memory_d *ptr; - opbuild_free (&build_defs); - opbuild_free (&build_uses); - opbuild_free (&build_v_must_defs); - opbuild_free (&build_v_may_defs); - opbuild_free (&build_vuses); - free_defs = NULL; - free_uses = NULL; - free_vuses = NULL; - free_maydefs = NULL; - free_mustdefs = NULL; - while ((ptr = operand_memory) != NULL) - { - operand_memory = operand_memory->next; - ggc_free (ptr); - } +/* Takes elements from build_defs and turns them into def operands of STMT. + TODO -- Make build_defs VEC of tree *. 
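+   As a sketch of the surrounding flow (helper names as used in this
+   file), a full operand rebuild for one statement is
+
+     start_ssa_stmt_operands ();
+     parse_ssa_operands (stmt);
+     finalize_ssa_stmt_operands (stmt);
+
+   and this routine runs from the finalization step.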
*/ - VEC_free (tree, heap, clobbered_v_may_defs); - VEC_free (tree, heap, clobbered_vuses); - VEC_free (tree, heap, ro_call_vuses); - ops_active = false; -} +static inline void +finalize_ssa_defs (gimple stmt) +{ + unsigned new_i; + struct def_optype_d new_list; + def_optype_p old_ops, last; + unsigned int num = VEC_length (tree, build_defs); + /* There should only be a single real definition per assignment. */ + gcc_assert ((stmt && gimple_code (stmt) != GIMPLE_ASSIGN) || num <= 1); -/* Return memory for operands of SIZE chunks. */ - -static inline void * -ssa_operand_alloc (unsigned size) -{ - char *ptr; - if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE) + /* Pre-pend the vdef we may have built. */ + if (build_vdef != NULL_TREE) { - struct ssa_operand_memory_d *ptr; - ptr = ggc_alloc (sizeof (struct ssa_operand_memory_d)); - ptr->next = operand_memory; - operand_memory = ptr; - operand_memory_index = 0; + tree oldvdef = gimple_vdef (stmt); + if (oldvdef + && TREE_CODE (oldvdef) == SSA_NAME) + oldvdef = SSA_NAME_VAR (oldvdef); + if (oldvdef != build_vdef) + gimple_set_vdef (stmt, build_vdef); + VEC_safe_insert (tree, heap, build_defs, 0, (tree)gimple_vdef_ptr (stmt)); + ++num; } - ptr = &(operand_memory->mem[operand_memory_index]); - operand_memory_index += size; - return ptr; -} - -/* Make sure PTR is inn the correct immediate use list. Since uses are simply - pointers into the stmt TREE, there is no way of telling if anyone has - changed what this pointer points to via TREE_OPERANDS (exp, 0) = <...>. - THe contents are different, but the the pointer is still the same. This - routine will check to make sure PTR is in the correct list, and if it isn't - put it in the correct list. We cannot simply check the previous node - because all nodes in the same stmt might have be changed. */ + new_list.next = NULL; + last = &new_list; -static inline void -correct_use_link (use_operand_p ptr, tree stmt) -{ - use_operand_p prev; - tree root; + old_ops = gimple_def_ops (stmt); - /* Fold_stmt () may have changed the stmt pointers. */ - if (ptr->stmt != stmt) - ptr->stmt = stmt; + new_i = 0; - prev = ptr->prev; - if (prev) + /* Clear and unlink a no longer necessary VDEF. */ + if (build_vdef == NULL_TREE + && gimple_vdef (stmt) != NULL_TREE) { - bool stmt_mod = true; - /* Find the first element which isn't a SAFE iterator, is in a different - stmt, and is not a a modified stmt, That node is in the correct list, - see if we are too. */ - - while (stmt_mod) + if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME) { - while (prev->stmt == stmt || prev->stmt == NULL) - prev = prev->prev; - if (prev->use == NULL) - stmt_mod = false; - else - if ((stmt_mod = stmt_modified_p (prev->stmt))) - prev = prev->prev; + unlink_stmt_vdef (stmt); + release_ssa_name (gimple_vdef (stmt)); } - - /* Get the ssa_name of the list the node is in. */ - if (prev->use == NULL) - root = prev->stmt; - else - root = *(prev->use); - /* If it's the right list, simply return. */ - if (root == *(ptr->use)) - return; + gimple_set_vdef (stmt, NULL_TREE); } - /* Its in the wrong list if we reach here. */ - delink_imm_use (ptr); - link_imm_use (ptr, *(ptr->use)); -} + /* If we have a non-SSA_NAME VDEF, mark it for renaming. 
*/ + if (gimple_vdef (stmt) + && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME) + mark_sym_for_renaming (gimple_vdef (stmt)); -#define FINALIZE_OPBUILD build_defs -#define FINALIZE_OPBUILD_BASE(I) opbuild_elem_real (&build_defs, (I)) -#define FINALIZE_OPBUILD_ELEM(I) opbuild_elem_real (&build_defs, (I)) -#define FINALIZE_FUNC finalize_ssa_def_ops -#define FINALIZE_ALLOC alloc_def -#define FINALIZE_FREE free_defs -#define FINALIZE_TYPE struct def_optype_d -#define FINALIZE_ELEM(PTR) ((PTR)->def_ptr) -#define FINALIZE_OPS DEF_OPS -#define FINALIZE_BASE(VAR) VAR -#define FINALIZE_BASE_TYPE tree * -#define FINALIZE_BASE_ZERO NULL -#define FINALIZE_INITIALIZE(PTR, VAL, STMT) FINALIZE_ELEM (PTR) = (VAL) -#include "tree-ssa-opfinalize.h" - - -/* This routine will create stmt operands for STMT from the def build list. */ + /* Check for the common case of 1 def that hasn't changed. */ + if (old_ops && old_ops->next == NULL && num == 1 + && (tree *) VEC_index (tree, build_defs, 0) == DEF_OP_PTR (old_ops)) + return; -static void -finalize_ssa_defs (tree stmt) -{ - unsigned int num = opbuild_num_elems (&build_defs); - /* There should only be a single real definition per assignment. */ - gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1); + /* If there is anything in the old list, free it. */ + if (old_ops) + { + old_ops->next = gimple_ssa_operands (cfun)->free_defs; + gimple_ssa_operands (cfun)->free_defs = old_ops; + } - /* If there is an old list, often the new list is identical, or close, so - find the elements at the beginning that are the same as the vector. */ + /* If there is anything remaining in the build_defs list, simply emit it. */ + for ( ; new_i < num; new_i++) + last = add_def_op ((tree *) VEC_index (tree, build_defs, new_i), last); - finalize_ssa_def_ops (stmt); - opbuild_clear (&build_defs); + /* Now set the stmt's operands. */ + gimple_set_def_ops (stmt, new_list.next); } -#define FINALIZE_OPBUILD build_uses -#define FINALIZE_OPBUILD_BASE(I) opbuild_elem_real (&build_uses, (I)) -#define FINALIZE_OPBUILD_ELEM(I) opbuild_elem_real (&build_uses, (I)) -#define FINALIZE_FUNC finalize_ssa_use_ops -#define FINALIZE_ALLOC alloc_use -#define FINALIZE_FREE free_uses -#define FINALIZE_TYPE struct use_optype_d -#define FINALIZE_ELEM(PTR) ((PTR)->use_ptr.use) -#define FINALIZE_OPS USE_OPS -#define FINALIZE_USE_PTR(PTR) USE_OP_PTR (PTR) -#define FINALIZE_BASE(VAR) VAR -#define FINALIZE_BASE_TYPE tree * -#define FINALIZE_BASE_ZERO NULL -#define FINALIZE_INITIALIZE(PTR, VAL, STMT) \ - (PTR)->use_ptr.use = (VAL); \ - link_imm_use_stmt (&((PTR)->use_ptr), \ - *(VAL), (STMT)) -#include "tree-ssa-opfinalize.h" - -/* Return a new use operand vector for STMT, comparing to OLD_OPS_P. */ - -static void -finalize_ssa_uses (tree stmt) -{ -#ifdef ENABLE_CHECKING - { - unsigned x; - unsigned num = opbuild_num_elems (&build_uses); - - /* If the pointer to the operand is the statement itself, something is - wrong. It means that we are pointing to a local variable (the - initial call to get_stmt_operands does not pass a pointer to a - statement). */ - for (x = 0; x < num; x++) - gcc_assert (*(opbuild_elem_real (&build_uses, x)) != stmt); - } -#endif - finalize_ssa_use_ops (stmt); - opbuild_clear (&build_uses); -} - - -/* Return a new v_may_def operand vector for STMT, comparing to OLD_OPS_P. 
*/ -#define FINALIZE_OPBUILD build_v_may_defs -#define FINALIZE_OPBUILD_ELEM(I) opbuild_elem_virtual (&build_v_may_defs, (I)) -#define FINALIZE_OPBUILD_BASE(I) opbuild_elem_uid (&build_v_may_defs, (I)) -#define FINALIZE_FUNC finalize_ssa_v_may_def_ops -#define FINALIZE_ALLOC alloc_maydef -#define FINALIZE_FREE free_maydefs -#define FINALIZE_TYPE struct maydef_optype_d -#define FINALIZE_ELEM(PTR) MAYDEF_RESULT (PTR) -#define FINALIZE_OPS MAYDEF_OPS -#define FINALIZE_USE_PTR(PTR) MAYDEF_OP_PTR (PTR) -#define FINALIZE_BASE_ZERO 0 -#define FINALIZE_BASE(VAR) ((TREE_CODE (VAR) == SSA_NAME) \ - ? DECL_UID (SSA_NAME_VAR (VAR)) : DECL_UID ((VAR))) -#define FINALIZE_BASE_TYPE unsigned -#define FINALIZE_INITIALIZE(PTR, VAL, STMT) \ - (PTR)->def_var = (VAL); \ - (PTR)->use_var = (VAL); \ - (PTR)->use_ptr.use = &((PTR)->use_var); \ - link_imm_use_stmt (&((PTR)->use_ptr), \ - (VAL), (STMT)) -#include "tree-ssa-opfinalize.h" - - -static void -finalize_ssa_v_may_defs (tree stmt) -{ - finalize_ssa_v_may_def_ops (stmt); -} - -/* Clear the in_list bits and empty the build array for v_may_defs. */ +/* Takes elements from build_uses and turns them into use operands of STMT. + TODO -- Make build_uses VEC of tree *. */ static inline void -cleanup_v_may_defs (void) +finalize_ssa_uses (gimple stmt) { - unsigned x, num; - num = opbuild_num_elems (&build_v_may_defs); + unsigned new_i; + struct use_optype_d new_list; + use_optype_p old_ops, ptr, last; - for (x = 0; x < num; x++) + /* Pre-pend the VUSE we may have built. */ + if (build_vuse != NULL_TREE) { - tree t = opbuild_elem_virtual (&build_v_may_defs, x); - if (TREE_CODE (t) != SSA_NAME) - { - var_ann_t ann = var_ann (t); - ann->in_v_may_def_list = 0; - } + tree oldvuse = gimple_vuse (stmt); + if (oldvuse + && TREE_CODE (oldvuse) == SSA_NAME) + oldvuse = SSA_NAME_VAR (oldvuse); + if (oldvuse != (build_vuse != NULL_TREE + ? build_vuse : build_vdef)) + gimple_set_vuse (stmt, NULL_TREE); + VEC_safe_insert (tree, heap, build_uses, 0, (tree)gimple_vuse_ptr (stmt)); } - opbuild_clear (&build_v_may_defs); -} - - -#define FINALIZE_OPBUILD build_vuses -#define FINALIZE_OPBUILD_ELEM(I) opbuild_elem_virtual (&build_vuses, (I)) -#define FINALIZE_OPBUILD_BASE(I) opbuild_elem_uid (&build_vuses, (I)) -#define FINALIZE_FUNC finalize_ssa_vuse_ops -#define FINALIZE_ALLOC alloc_vuse -#define FINALIZE_FREE free_vuses -#define FINALIZE_TYPE struct vuse_optype_d -#define FINALIZE_ELEM(PTR) VUSE_OP (PTR) -#define FINALIZE_OPS VUSE_OPS -#define FINALIZE_USE_PTR(PTR) VUSE_OP_PTR (PTR) -#define FINALIZE_BASE_ZERO 0 -#define FINALIZE_BASE(VAR) ((TREE_CODE (VAR) == SSA_NAME) \ - ? DECL_UID (SSA_NAME_VAR (VAR)) : DECL_UID ((VAR))) -#define FINALIZE_BASE_TYPE unsigned -#define FINALIZE_INITIALIZE(PTR, VAL, STMT) \ - (PTR)->use_var = (VAL); \ - (PTR)->use_ptr.use = &((PTR)->use_var); \ - link_imm_use_stmt (&((PTR)->use_ptr), \ - (VAL), (STMT)) -#include "tree-ssa-opfinalize.h" - - -/* Return a new vuse operand vector, comparing to OLD_OPS_P. */ - -static void -finalize_ssa_vuses (tree stmt) -{ - unsigned num, num_v_may_defs; - int vuse_index; - /* Remove superfluous VUSE operands. If the statement already has a - V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not - needed because V_MAY_DEFs imply a VUSE of the variable. For instance, - suppose that variable 'a' is aliased: + new_list.next = NULL; + last = &new_list; - # VUSE - # a_3 = V_MAY_DEF - a = a + 1; + old_ops = gimple_use_ops (stmt); - The VUSE is superfluous because it is implied by the V_MAY_DEF - operation. 
*/ + /* Clear a no longer necessary VUSE. */ + if (build_vuse == NULL_TREE + && gimple_vuse (stmt) != NULL_TREE) + gimple_set_vuse (stmt, NULL_TREE); - num = opbuild_num_elems (&build_vuses); - num_v_may_defs = opbuild_num_elems (&build_v_may_defs); + /* If there is anything in the old list, free it. */ + if (old_ops) + { + for (ptr = old_ops; ptr; ptr = ptr->next) + delink_imm_use (USE_OP_PTR (ptr)); + old_ops->next = gimple_ssa_operands (cfun)->free_uses; + gimple_ssa_operands (cfun)->free_uses = old_ops; + } - if (num > 0 && num_v_may_defs > 0) + /* If we added a VUSE, make sure to set the operand if it is not already + present and mark it for renaming. */ + if (build_vuse != NULL_TREE + && gimple_vuse (stmt) == NULL_TREE) { - int last = OPBUILD_LAST; - vuse_index = opbuild_first (&build_vuses); - for ( ; vuse_index != OPBUILD_LAST; ) - { - tree vuse; - vuse = opbuild_elem_virtual (&build_vuses, vuse_index); - if (TREE_CODE (vuse) != SSA_NAME) - { - var_ann_t ann = var_ann (vuse); - ann->in_vuse_list = 0; - if (ann->in_v_may_def_list) - { - vuse_index = opbuild_remove_elem (&build_vuses, vuse_index, - last); - continue; - } - } - last = vuse_index; - vuse_index = opbuild_next (&build_vuses, vuse_index); - } + gimple_set_vuse (stmt, gimple_vop (cfun)); + mark_sym_for_renaming (gimple_vop (cfun)); } - else - /* Clear out the in_list bits. */ - for (vuse_index = opbuild_first (&build_vuses); - vuse_index != OPBUILD_LAST; - vuse_index = opbuild_next (&build_vuses, vuse_index)) - { - tree t = opbuild_elem_virtual (&build_vuses, vuse_index); - if (TREE_CODE (t) != SSA_NAME) - { - var_ann_t ann = var_ann (t); - ann->in_vuse_list = 0; - } - } - finalize_ssa_vuse_ops (stmt); - /* The v_may_def build vector wasn't cleaned up because we needed it. */ - cleanup_v_may_defs (); - - /* Free the vuses build vector. */ - opbuild_clear (&build_vuses); + /* Now create nodes for all the new nodes. */ + for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++) + last = add_use_op (stmt, + (tree *) VEC_index (tree, build_uses, new_i), + last); + /* Now set the stmt's operands. */ + gimple_set_use_ops (stmt, new_list.next); } - -/* Return a new v_must_def operand vector for STMT, comparing to OLD_OPS_P. */ - -#define FINALIZE_OPBUILD build_v_must_defs -#define FINALIZE_OPBUILD_ELEM(I) opbuild_elem_virtual (&build_v_must_defs, (I)) -#define FINALIZE_OPBUILD_BASE(I) opbuild_elem_uid (&build_v_must_defs, (I)) -#define FINALIZE_FUNC finalize_ssa_v_must_def_ops -#define FINALIZE_ALLOC alloc_mustdef -#define FINALIZE_FREE free_mustdefs -#define FINALIZE_TYPE struct mustdef_optype_d -#define FINALIZE_ELEM(PTR) MUSTDEF_RESULT (PTR) -#define FINALIZE_OPS MUSTDEF_OPS -#define FINALIZE_USE_PTR(PTR) MUSTDEF_KILL_PTR (PTR) -#define FINALIZE_BASE_ZERO 0 -#define FINALIZE_BASE(VAR) ((TREE_CODE (VAR) == SSA_NAME) \ - ? DECL_UID (SSA_NAME_VAR (VAR)) : DECL_UID ((VAR))) -#define FINALIZE_BASE_TYPE unsigned -#define FINALIZE_INITIALIZE(PTR, VAL, STMT) \ - (PTR)->def_var = (VAL); \ - (PTR)->kill_var = (VAL); \ - (PTR)->use_ptr.use = &((PTR)->kill_var);\ - link_imm_use_stmt (&((PTR)->use_ptr), \ - (VAL), (STMT)) -#include "tree-ssa-opfinalize.h" -static void -finalize_ssa_v_must_defs (tree stmt) +/* Clear the in_list bits and empty the build array for VDEFs and + VUSEs. */ + +static inline void +cleanup_build_arrays (void) { - /* In the presence of subvars, there may be more than one V_MUST_DEF per - statement (one for each subvar). 
It is a bit expensive to verify that - all must-defs in a statement belong to subvars if there is more than one - MUST-def, so we don't do it. Suffice to say, if you reach here without - having subvars, and have num >1, you have hit a bug. */ - - finalize_ssa_v_must_def_ops (stmt); - opbuild_clear (&build_v_must_defs); + build_vdef = NULL_TREE; + build_vuse = NULL_TREE; + VEC_truncate (tree, build_defs, 0); + VEC_truncate (tree, build_uses, 0); } /* Finalize all the build vectors, fill the new ones into INFO. */ - + static inline void -finalize_ssa_stmt_operands (tree stmt) +finalize_ssa_stmt_operands (gimple stmt) { finalize_ssa_defs (stmt); finalize_ssa_uses (stmt); - finalize_ssa_v_must_defs (stmt); - finalize_ssa_v_may_defs (stmt); - finalize_ssa_vuses (stmt); + cleanup_build_arrays (); } @@ -781,11 +569,10 @@ finalize_ssa_stmt_operands (tree stmt) static inline void start_ssa_stmt_operands (void) { - gcc_assert (opbuild_num_elems (&build_defs) == 0); - gcc_assert (opbuild_num_elems (&build_uses) == 0); - gcc_assert (opbuild_num_elems (&build_vuses) == 0); - gcc_assert (opbuild_num_elems (&build_v_may_defs) == 0); - gcc_assert (opbuild_num_elems (&build_v_must_defs) == 0); + gcc_assert (VEC_length (tree, build_defs) == 0); + gcc_assert (VEC_length (tree, build_uses) == 0); + gcc_assert (build_vuse == NULL_TREE); + gcc_assert (build_vdef == NULL_TREE); } @@ -794,7 +581,7 @@ start_ssa_stmt_operands (void) static inline void append_def (tree *def_p) { - opbuild_append_real (&build_defs, def_p); + VEC_safe_push (tree, heap, build_defs, (tree) def_p); } @@ -803,597 +590,384 @@ append_def (tree *def_p) static inline void append_use (tree *use_p) { - opbuild_append_real (&build_uses, use_p); + VEC_safe_push (tree, heap, build_uses, (tree) use_p); } -/* Add a new virtual may def for variable VAR to the build array. */ +/* Add VAR to the set of variables that require a VDEF operator. */ static inline void -append_v_may_def (tree var) +append_vdef (tree var) { - if (TREE_CODE (var) != SSA_NAME) - { - var_ann_t ann = get_var_ann (var); + if (!optimize) + return; - /* Don't allow duplicate entries. */ - if (ann->in_v_may_def_list) - return; - ann->in_v_may_def_list = 1; - } + gcc_assert ((build_vdef == NULL_TREE + || build_vdef == var) + && (build_vuse == NULL_TREE + || build_vuse == var)); - opbuild_append_virtual (&build_v_may_defs, var); + build_vdef = var; + build_vuse = var; } -/* Add VAR to the list of virtual uses. */ +/* Add VAR to the set of variables that require a VUSE operator. */ static inline void append_vuse (tree var) { + if (!optimize) + return; - /* Don't allow duplicate entries. */ - if (TREE_CODE (var) != SSA_NAME) - { - var_ann_t ann = get_var_ann (var); - - if (ann->in_vuse_list || ann->in_v_may_def_list) - return; - ann->in_vuse_list = 1; - } + gcc_assert (build_vuse == NULL_TREE + || build_vuse == var); - opbuild_append_virtual (&build_vuses, var); + build_vuse = var; } +/* Add virtual operands for STMT. FLAGS is as in get_expr_operands. */ -/* Add VAR to the list of virtual must definitions for INFO. */ - -static inline void -append_v_must_def (tree var) +static void +add_virtual_operand (gimple stmt ATTRIBUTE_UNUSED, int flags) { - unsigned i; + /* Add virtual operands to the stmt, unless the caller has specifically + requested not to do that (used when adding operands inside an + ADDR_EXPR expression). */ + if (flags & opf_no_vops) + return; - /* Don't allow duplicate entries. 
*/ - for (i = 0; i < opbuild_num_elems (&build_v_must_defs); i++) - if (var == opbuild_elem_virtual (&build_v_must_defs, i)) - return; + gcc_assert (!is_gimple_debug (stmt)); - opbuild_append_virtual (&build_v_must_defs, var); + if (flags & opf_def) + append_vdef (gimple_vop (cfun)); + else + append_vuse (gimple_vop (cfun)); } -/* Parse STMT looking for operands. OLD_OPS is the original stmt operand - cache for STMT, if it existed before. When finished, the various build_* - operand vectors will have potential operands. in them. */ - +/* Add *VAR_P to the appropriate operand array for statement STMT. + FLAGS is as in get_expr_operands. If *VAR_P is a GIMPLE register, + it will be added to the statement's real operands, otherwise it is + added to virtual operands. */ + static void -parse_ssa_operands (tree stmt) +add_stmt_operand (tree *var_p, gimple stmt, int flags) { - enum tree_code code; + tree var, sym; - code = TREE_CODE (stmt); - switch (code) - { - case MODIFY_EXPR: - /* First get operands from the RHS. For the LHS, we use a V_MAY_DEF if - either only part of LHS is modified or if the RHS might throw, - otherwise, use V_MUST_DEF. + gcc_assert (SSA_VAR_P (*var_p)); - ??? If it might throw, we should represent somehow that it is killed - on the fallthrough path. */ - { - tree lhs = TREE_OPERAND (stmt, 0); - int lhs_flags = opf_is_def; + var = *var_p; + sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var); - get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none); + /* Mark statements with volatile operands. */ + if (TREE_THIS_VOLATILE (sym)) + gimple_set_has_volatile_ops (stmt, true); - /* If the LHS is a VIEW_CONVERT_EXPR, it isn't changing whether - or not the entire LHS is modified; that depends on what's - inside the VIEW_CONVERT_EXPR. */ - if (TREE_CODE (lhs) == VIEW_CONVERT_EXPR) - lhs = TREE_OPERAND (lhs, 0); + if (is_gimple_reg (sym)) + { + /* The variable is a GIMPLE register. Add it to real operands. */ + if (flags & opf_def) + append_def (var_p); + else + append_use (var_p); + } + else + add_virtual_operand (stmt, flags); +} - if (TREE_CODE (lhs) != ARRAY_REF && TREE_CODE (lhs) != ARRAY_RANGE_REF - && TREE_CODE (lhs) != BIT_FIELD_REF - && TREE_CODE (lhs) != REALPART_EXPR - && TREE_CODE (lhs) != IMAGPART_EXPR) - lhs_flags |= opf_kill_def; +/* Mark the base address of REF as having its address taken. + REF may be a single variable whose address has been taken or any + other valid GIMPLE memory reference (structure reference, array, + etc). */ - get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), lhs_flags); - } - break; +static void +mark_address_taken (tree ref) +{ + tree var; - case COND_EXPR: - get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none); - break; + /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF + as the only thing we take the address of. If VAR is a structure, + taking the address of a field means that the whole structure may + be referenced using pointer arithmetic. See PR 21407 and the + ensuing mailing list discussion. */ + var = get_base_address (ref); + if (var && DECL_P (var)) + TREE_ADDRESSABLE (var) = 1; +} - case SWITCH_EXPR: - get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none); - break; - case ASM_EXPR: - get_asm_expr_operands (stmt); - break; +/* A subroutine of get_expr_operands to handle INDIRECT_REF, + ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF. 
- case RETURN_EXPR: - get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none); - break; + STMT is the statement being processed, EXPR is the INDIRECT_REF + that got us here. - case GOTO_EXPR: - get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none); - break; + FLAGS is as in get_expr_operands. - case LABEL_EXPR: - get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none); - break; + RECURSE_ON_BASE should be set to true if we want to continue + calling get_expr_operands on the base pointer, and false if + something else will do it for us. */ - /* These nodes contain no variable references. */ - case BIND_EXPR: - case CASE_LABEL_EXPR: - case TRY_CATCH_EXPR: - case TRY_FINALLY_EXPR: - case EH_FILTER_EXPR: - case CATCH_EXPR: - case RESX_EXPR: - break; +static void +get_indirect_ref_operands (gimple stmt, tree expr, int flags, + bool recurse_on_base) +{ + tree *pptr = &TREE_OPERAND (expr, 0); - default: - /* Notice that if get_expr_operands tries to use &STMT as the operand - pointer (which may only happen for USE operands), we will fail in - append_use. This default will handle statements like empty - statements, or CALL_EXPRs that may appear on the RHS of a statement - or as statements themselves. */ - get_expr_operands (stmt, &stmt, opf_none); - break; - } -} + if (TREE_THIS_VOLATILE (expr)) + gimple_set_has_volatile_ops (stmt, true); -/* Create an operands cache for STMT, returning it in NEW_OPS. OLD_OPS are the - original operands, and if ANN is non-null, appropriate stmt flags are set - in the stmt's annotation. If ANN is NULL, this is not considered a "real" - stmt, and none of the operands will be entered into their respective - immediate uses tables. This is to allow stmts to be processed when they - are not actually in the CFG. + /* Add the VOP. */ + add_virtual_operand (stmt, flags); - Note that some fields in old_ops may change to NULL, although none of the - memory they originally pointed to will be destroyed. It is appropriate - to call free_stmt_operands() on the value returned in old_ops. + /* If requested, add a USE operand for the base pointer. */ + if (recurse_on_base) + get_expr_operands (stmt, pptr, + opf_use | (flags & opf_no_vops)); +} - The rationale for this: Certain optimizations wish to examine the difference - between new_ops and old_ops after processing. If a set of operands don't - change, new_ops will simply assume the pointer in old_ops, and the old_ops - pointer will be set to NULL, indicating no memory needs to be cleared. - Usage might appear something like: - old_ops_copy = old_ops = stmt_ann(stmt)->operands; - build_ssa_operands (stmt, NULL, &old_ops, &new_ops); - <* compare old_ops_copy and new_ops *> - free_ssa_operands (old_ops); */ +/* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */ static void -build_ssa_operands (tree stmt) +get_tmr_operands (gimple stmt, tree expr, int flags) { - stmt_ann_t ann = get_stmt_ann (stmt); - - /* Initially assume that the statement has no volatile operands, nor - makes aliased loads or stores. */ - if (ann) - { - ann->has_volatile_ops = false; - ann->makes_aliased_stores = false; - ann->makes_aliased_loads = false; - } + if (TREE_THIS_VOLATILE (expr)) + gimple_set_has_volatile_ops (stmt, true); - start_ssa_stmt_operands (); + /* First record the real operands. 
*/ + get_expr_operands (stmt, &TMR_BASE (expr), opf_use | (flags & opf_no_vops)); + get_expr_operands (stmt, &TMR_INDEX (expr), opf_use | (flags & opf_no_vops)); - parse_ssa_operands (stmt); + if (TMR_SYMBOL (expr)) + mark_address_taken (TMR_SYMBOL (expr)); - finalize_ssa_stmt_operands (stmt); + add_virtual_operand (stmt, flags); } -/* Free any operands vectors in OPS. */ -#if 0 -static void -free_ssa_operands (stmt_operands_p ops) +/* If STMT is a call that may clobber globals and other symbols that + escape, add them to the VDEF/VUSE lists for it. */ + +static void +maybe_add_call_vops (gimple stmt) { - ops->def_ops = NULL; - ops->use_ops = NULL; - ops->maydef_ops = NULL; - ops->mustdef_ops = NULL; - ops->vuse_ops = NULL; - while (ops->memory.next != NULL) + int call_flags = gimple_call_flags (stmt); + + /* If aliases have been computed already, add VDEF or VUSE + operands for all the symbols that have been found to be + call-clobbered. */ + if (!(call_flags & ECF_NOVOPS)) { - operand_memory_p tmp = ops->memory.next; - ops->memory.next = tmp->next; - ggc_free (tmp); + /* A 'pure' or a 'const' function never call-clobbers anything. + A 'noreturn' function might, but since we don't return anyway + there is no point in recording that. */ + if (!(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN))) + add_virtual_operand (stmt, opf_def); + else if (!(call_flags & ECF_CONST)) + add_virtual_operand (stmt, opf_use); } } -#endif -/* Get the operands of statement STMT. Note that repeated calls to - get_stmt_operands for the same statement will do nothing until the - statement is marked modified by a call to mark_stmt_modified(). */ +/* Scan operands in the ASM_EXPR stmt referred to in INFO. */ -void -update_stmt_operands (tree stmt) +static void +get_asm_expr_operands (gimple stmt) { - stmt_ann_t ann = get_stmt_ann (stmt); - /* If get_stmt_operands is called before SSA is initialized, dont - do anything. */ - if (!ssa_operands_active ()) - return; - /* The optimizers cannot handle statements that are nothing but a - _DECL. This indicates a bug in the gimplifier. */ - gcc_assert (!SSA_VAR_P (stmt)); - - gcc_assert (ann->modified); - - timevar_push (TV_TREE_OPS); - - build_ssa_operands (stmt); - - /* Clear the modified bit for STMT. Subsequent calls to - get_stmt_operands for this statement will do nothing until the - statement is marked modified by a call to mark_stmt_modified(). */ - ann->modified = 0; - - timevar_pop (TV_TREE_OPS); -} + size_t i, noutputs; + const char **oconstraints; + const char *constraint; + bool allows_mem, allows_reg, is_inout; - -/* Copies virtual operands from SRC to DST. */ + noutputs = gimple_asm_noutputs (stmt); + oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *)); -void -copy_virtual_operands (tree dest, tree src) -{ - tree t; - ssa_op_iter iter, old_iter; - use_operand_p use_p, u2; - def_operand_p def_p, d2; - - build_ssa_operands (dest); - - /* Copy all the virtual fields. */ - FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE) - append_vuse (t); - FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF) - append_v_may_def (t); - FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF) - append_v_must_def (t); - - if (opbuild_num_elems (&build_vuses) == 0 - && opbuild_num_elems (&build_v_may_defs) == 0 - && opbuild_num_elems (&build_v_must_defs) == 0) - return; - - /* Now commit the virtual operands to this stmt. 
*/ - finalize_ssa_v_must_defs (dest); - finalize_ssa_v_may_defs (dest); - finalize_ssa_vuses (dest); + /* Gather all output operands. */ + for (i = 0; i < gimple_asm_noutputs (stmt); i++) + { + tree link = gimple_asm_output_op (stmt, i); + constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link))); + oconstraints[i] = constraint; + parse_output_constraint (&constraint, i, 0, 0, &allows_mem, + &allows_reg, &is_inout); - /* Finally, set the field to the same values as then originals. */ + /* This should have been split in gimplify_asm_expr. */ + gcc_assert (!allows_reg || !is_inout); - - t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE); - FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE) - { - gcc_assert (!op_iter_done (&old_iter)); - SET_USE (use_p, t); - t = op_iter_next_tree (&old_iter); - } - gcc_assert (op_iter_done (&old_iter)); + /* Memory operands are addressable. Note that STMT needs the + address of this operand. */ + if (!allows_reg && allows_mem) + mark_address_taken (TREE_VALUE (link)); - op_iter_init_maydef (&old_iter, src, &u2, &d2); - FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter) - { - gcc_assert (!op_iter_done (&old_iter)); - SET_USE (use_p, USE_FROM_PTR (u2)); - SET_DEF (def_p, DEF_FROM_PTR (d2)); - op_iter_next_maymustdef (&u2, &d2, &old_iter); + get_expr_operands (stmt, &TREE_VALUE (link), opf_def); } - gcc_assert (op_iter_done (&old_iter)); - op_iter_init_mustdef (&old_iter, src, &u2, &d2); - FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter) + /* Gather all input operands. */ + for (i = 0; i < gimple_asm_ninputs (stmt); i++) { - gcc_assert (!op_iter_done (&old_iter)); - SET_USE (use_p, USE_FROM_PTR (u2)); - SET_DEF (def_p, DEF_FROM_PTR (d2)); - op_iter_next_maymustdef (&u2, &d2, &old_iter); - } - gcc_assert (op_iter_done (&old_iter)); - -} - - -/* Specifically for use in DOM's expression analysis. Given a store, we - create an artificial stmt which looks like a load from the store, this can - be used to eliminate redundant loads. OLD_OPS are the operands from the - store stmt, and NEW_STMT is the new load which represents a load of the - values stored. */ + tree link = gimple_asm_input_op (stmt, i); + constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link))); + parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints, + &allows_mem, &allows_reg); -void -create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt) -{ - stmt_ann_t ann; - tree op; - ssa_op_iter iter; - use_operand_p use_p; - unsigned x; - - ann = get_stmt_ann (new_stmt); - - /* process the stmt looking for operands. */ - start_ssa_stmt_operands (); - parse_ssa_operands (new_stmt); + /* Memory operands are addressable. Note that STMT needs the + address of this operand. */ + if (!allows_reg && allows_mem) + mark_address_taken (TREE_VALUE (link)); - for (x = 0; x < opbuild_num_elems (&build_vuses); x++) - { - tree t = opbuild_elem_virtual (&build_vuses, x); - if (TREE_CODE (t) != SSA_NAME) - { - var_ann_t ann = var_ann (t); - ann->in_vuse_list = 0; - } - } - - for (x = 0; x < opbuild_num_elems (&build_v_may_defs); x++) - { - tree t = opbuild_elem_virtual (&build_v_may_defs, x); - if (TREE_CODE (t) != SSA_NAME) - { - var_ann_t ann = var_ann (t); - ann->in_v_may_def_list = 0; - } + get_expr_operands (stmt, &TREE_VALUE (link), 0); } - /* Remove any virtual operands that were found. 
*/ - opbuild_clear (&build_v_may_defs); - opbuild_clear (&build_v_must_defs); - opbuild_clear (&build_vuses); - - /* For each VDEF on the original statement, we want to create a - VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new - statement. */ - FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter, - (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF)) - append_vuse (op); - - /* Now build the operands for this new stmt. */ - finalize_ssa_stmt_operands (new_stmt); - - /* All uses in this fake stmt must not be in the immediate use lists. */ - FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES) - delink_imm_use (use_p); -} - -static void -swap_tree_operands (tree stmt, tree *exp0, tree *exp1) -{ - tree op0, op1; - op0 = *exp0; - op1 = *exp1; - /* If the operand cache is active, attempt to preserve the relative positions - of these two operands in their respective immediate use lists. */ - if (ssa_operands_active () && op0 != op1) + /* Clobber all memory and addressable symbols for asm ("" : : : "memory"); */ + for (i = 0; i < gimple_asm_nclobbers (stmt); i++) { - use_optype_p use0, use1, ptr; - use0 = use1 = NULL; - /* Find the 2 operands in the cache, if they are there. */ - for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next) - if (USE_OP_PTR (ptr)->use == exp0) - { - use0 = ptr; - break; - } - for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next) - if (USE_OP_PTR (ptr)->use == exp1) - { - use1 = ptr; - break; - } - /* If both uses don't have operand entries, there isn't much we can do - at this point. Presumably we dont need to worry about it. */ - if (use0 && use1) - { - tree *tmp = USE_OP_PTR (use1)->use; - USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use; - USE_OP_PTR (use0)->use = tmp; - } - } - - /* Now swap the data. */ - *exp0 = op1; - *exp1 = op0; + tree link = gimple_asm_clobber_op (stmt, i); + if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0) + { + add_virtual_operand (stmt, opf_def); + break; + } + } } -/* Recursively scan the expression pointed by EXPR_P in statement referred to - by INFO. FLAGS is one of the OPF_* constants modifying how to interpret the - operands found. */ +/* Recursively scan the expression pointed to by EXPR_P in statement + STMT. FLAGS is one of the OPF_* constants modifying how to + interpret the operands found. */ static void -get_expr_operands (tree stmt, tree *expr_p, int flags) +get_expr_operands (gimple stmt, tree *expr_p, int flags) { enum tree_code code; - enum tree_code_class class; + enum tree_code_class codeclass; tree expr = *expr_p; - stmt_ann_t s_ann = stmt_ann (stmt); + int uflags = opf_use; if (expr == NULL) return; + if (is_gimple_debug (stmt)) + uflags |= (flags & opf_no_vops); + code = TREE_CODE (expr); - class = TREE_CODE_CLASS (code); + codeclass = TREE_CODE_CLASS (code); switch (code) { case ADDR_EXPR: - /* We could have the address of a component, array member, - etc which has interesting variable references. */ /* Taking the address of a variable does not represent a - reference to it, but the fact that the stmt takes its address will be - of interest to some passes (e.g. alias resolution). */ - add_stmt_operand (expr_p, s_ann, 0); - - /* If the address is invariant, there may be no interesting variable - references inside. */ + reference to it, but the fact that the statement takes its + address will be of interest to some passes (e.g. alias + resolution). 
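+         As a sketch restating the opf_no_vops example above: for
+         t = &a.b[i], only 'i' is added as a real USE; no VUSE is
+         created for 'a' or 'b', while 'a' is marked addressable via
+         mark_address_taken.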
*/ + if (!is_gimple_debug (stmt)) + mark_address_taken (TREE_OPERAND (expr, 0)); + + /* If the address is invariant, there may be no interesting + variable references inside. */ if (is_gimple_min_invariant (expr)) return; - /* There should be no VUSEs created, since the referenced objects are - not really accessed. The only operands that we should find here - are ARRAY_REF indices which will always be real operands (GIMPLE - does not allow non-registers as array indices). */ + /* Otherwise, there may be variables referenced inside but there + should be no VUSEs created, since the referenced objects are + not really accessed. The only operands that we should find + here are ARRAY_REF indices which will always be real operands + (GIMPLE does not allow non-registers as array indices). */ flags |= opf_no_vops; - get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); return; case SSA_NAME: + add_stmt_operand (expr_p, stmt, flags); + return; + case VAR_DECL: case PARM_DECL: case RESULT_DECL: - case CONST_DECL: - { - subvar_t svars; - - /* Add the subvars for a variable if it has subvars, to DEFS or USES. - Otherwise, add the variable itself. - Whether it goes to USES or DEFS depends on the operand flags. */ - if (var_can_have_subvars (expr) - && (svars = get_subvars_for_var (expr))) - { - subvar_t sv; - for (sv = svars; sv; sv = sv->next) - add_stmt_operand (&sv->var, s_ann, flags); - } - else - { - add_stmt_operand (expr_p, s_ann, flags); - } - return; - } + add_stmt_operand (expr_p, stmt, flags); + return; + + case DEBUG_EXPR_DECL: + gcc_assert (gimple_debug_bind_p (stmt)); + return; + case MISALIGNED_INDIRECT_REF: get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags); /* fall through */ case ALIGN_INDIRECT_REF: case INDIRECT_REF: - get_indirect_ref_operands (stmt, expr, flags); + get_indirect_ref_operands (stmt, expr, flags, true); return; - case ARRAY_REF: - case ARRAY_RANGE_REF: - /* Treat array references as references to the virtual variable - representing the array. The virtual variable for an ARRAY_REF - is the VAR_DECL for the array. */ - - /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES - according to the value of IS_DEF. Recurse if the LHS of the - ARRAY_REF node is not a regular variable. */ - if (SSA_VAR_P (TREE_OPERAND (expr, 0))) - add_stmt_operand (expr_p, s_ann, flags); - else - get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); - - get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none); - get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none); - get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none); + case TARGET_MEM_REF: + get_tmr_operands (stmt, expr, flags); return; + case ARRAY_REF: + case ARRAY_RANGE_REF: case COMPONENT_REF: case REALPART_EXPR: case IMAGPART_EXPR: { - tree ref; - HOST_WIDE_INT offset, size; - /* This component ref becomes an access to all of the subvariables - it can touch, if we can determine that, but *NOT* the real one. - If we can't determine which fields we could touch, the recursion - will eventually get to a variable and add *all* of its subvars, or - whatever is the minimum correct subset. 
*/ - - ref = okay_component_ref_for_subvars (expr, &offset, &size); - if (ref) - { - subvar_t svars = get_subvars_for_var (ref); - subvar_t sv; - for (sv = svars; sv; sv = sv->next) - { - bool exact; - if (overlap_subvar (offset, size, sv, &exact)) - { - if (exact) - flags &= ~opf_kill_def; - add_stmt_operand (&sv->var, s_ann, flags); - } - } - } - else - get_expr_operands (stmt, &TREE_OPERAND (expr, 0), - flags & ~opf_kill_def); - + if (TREE_THIS_VOLATILE (expr)) + gimple_set_has_volatile_ops (stmt, true); + + get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); + if (code == COMPONENT_REF) - get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none); + { + if (TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1))) + gimple_set_has_volatile_ops (stmt, true); + get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags); + } + else if (code == ARRAY_REF || code == ARRAY_RANGE_REF) + { + get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags); + get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags); + get_expr_operands (stmt, &TREE_OPERAND (expr, 3), uflags); + } + return; } + case WITH_SIZE_EXPR: /* WITH_SIZE_EXPR is a pass-through reference to its first argument, and an rvalue reference to its second argument. */ - get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none); + get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags); get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); return; - case CALL_EXPR: - get_call_expr_operands (stmt, expr); - return; - case COND_EXPR: case VEC_COND_EXPR: - get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none); - get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none); - get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none); + get_expr_operands (stmt, &TREE_OPERAND (expr, 0), uflags); + get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags); + get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags); return; - case MODIFY_EXPR: - { - int subflags; - tree op; - - get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none); - - op = TREE_OPERAND (expr, 0); - if (TREE_CODE (op) == WITH_SIZE_EXPR) - op = TREE_OPERAND (expr, 0); - if (TREE_CODE (op) == ARRAY_REF - || TREE_CODE (op) == ARRAY_RANGE_REF - || TREE_CODE (op) == REALPART_EXPR - || TREE_CODE (op) == IMAGPART_EXPR) - subflags = opf_is_def; - else - subflags = opf_is_def | opf_kill_def; - - get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags); - return; - } - case CONSTRUCTOR: { /* General aggregate CONSTRUCTORs have been decomposed, but they are still in use as the COMPLEX_EXPR equivalent for vectors. */ + constructor_elt *ce; + unsigned HOST_WIDE_INT idx; - tree t; - for (t = TREE_OPERAND (expr, 0); t ; t = TREE_CHAIN (t)) - get_expr_operands (stmt, &TREE_VALUE (t), opf_none); + for (idx = 0; + VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce); + idx++) + get_expr_operands (stmt, &ce->value, uflags); return; } - case TRUTH_NOT_EXPR: case BIT_FIELD_REF: + if (TREE_THIS_VOLATILE (expr)) + gimple_set_has_volatile_ops (stmt, true); + /* FALLTHRU */ + + case TRUTH_NOT_EXPR: case VIEW_CONVERT_EXPR: do_unary: get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); @@ -1407,44 +981,12 @@ get_expr_operands (tree stmt, tree *expr_p, int flags) case ASSERT_EXPR: do_binary: { - tree op0 = TREE_OPERAND (expr, 0); - tree op1 = TREE_OPERAND (expr, 1); - - /* If it would be profitable to swap the operands, then do so to - canonicalize the statement, enabling better optimization. 
- - By placing canonicalization of such expressions here we - transparently keep statements in canonical form, even - when the statement is modified. */ - if (tree_swap_operands_p (op0, op1, false)) - { - /* For relationals we need to swap the operands - and change the code. */ - if (code == LT_EXPR - || code == GT_EXPR - || code == LE_EXPR - || code == GE_EXPR) - { - TREE_SET_CODE (expr, swap_tree_comparison (code)); - swap_tree_operands (stmt, - &TREE_OPERAND (expr, 0), - &TREE_OPERAND (expr, 1)); - } - - /* For a commutative operator we can just swap the operands. */ - else if (commutative_tree_code (code)) - { - swap_tree_operands (stmt, - &TREE_OPERAND (expr, 0), - &TREE_OPERAND (expr, 1)); - } - } - get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags); return; } + case DOT_PROD_EXPR: case REALIGN_LOAD_EXPR: { get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); @@ -1453,20 +995,19 @@ get_expr_operands (tree stmt, tree *expr_p, int flags) return; } - case BLOCK: case FUNCTION_DECL: - case EXC_PTR_EXPR: - case FILTER_EXPR: case LABEL_DECL: + case CONST_DECL: + case CASE_LABEL_EXPR: /* Expressions that make no memory references. */ return; default: - if (class == tcc_unary) + if (codeclass == tcc_unary) goto do_unary; - if (class == tcc_binary || class == tcc_comparison) + if (codeclass == tcc_binary || codeclass == tcc_comparison) goto do_binary; - if (class == tcc_constant || class == tcc_type) + if (codeclass == tcc_constant || codeclass == tcc_type) return; } @@ -1475,639 +1016,169 @@ get_expr_operands (tree stmt, tree *expr_p, int flags) fprintf (stderr, "unhandled expression in get_expr_operands():\n"); debug_tree (expr); fputs ("\n", stderr); - internal_error ("internal error"); #endif gcc_unreachable (); } -/* Scan operands in the ASM_EXPR stmt referred to in INFO. */ +/* Parse STMT looking for operands. When finished, the various + build_* operand vectors will have potential operands in them. */ static void -get_asm_expr_operands (tree stmt) +parse_ssa_operands (gimple stmt) { - stmt_ann_t s_ann = stmt_ann (stmt); - int noutputs = list_length (ASM_OUTPUTS (stmt)); - const char **oconstraints - = (const char **) alloca ((noutputs) * sizeof (const char *)); - int i; - tree link; - const char *constraint; - bool allows_mem, allows_reg, is_inout; - - for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link)) - { - oconstraints[i] = constraint - = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link))); - parse_output_constraint (&constraint, i, 0, 0, - &allows_mem, &allows_reg, &is_inout); - - /* This should have been split in gimplify_asm_expr. */ - gcc_assert (!allows_reg || !is_inout); - - /* Memory operands are addressable. Note that STMT needs the - address of this operand. */ - if (!allows_reg && allows_mem) - { - tree t = get_base_address (TREE_VALUE (link)); - if (t && DECL_P (t)) - note_addressable (t, s_ann); - } - - get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def); - } + enum gimple_code code = gimple_code (stmt); - for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link)) + if (code == GIMPLE_ASM) + get_asm_expr_operands (stmt); + else if (is_gimple_debug (stmt)) { - constraint - = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link))); - parse_input_constraint (&constraint, 0, 0, noutputs, 0, - oconstraints, &allows_mem, &allows_reg); - - /* Memory operands are addressable. Note that STMT needs the - address of this operand. 
*/ - if (!allows_reg && allows_mem) - { - tree t = get_base_address (TREE_VALUE (link)); - if (t && DECL_P (t)) - note_addressable (t, s_ann); - } - - get_expr_operands (stmt, &TREE_VALUE (link), 0); + if (gimple_debug_bind_p (stmt) + && gimple_debug_bind_has_value_p (stmt)) + get_expr_operands (stmt, gimple_debug_bind_get_value_ptr (stmt), + opf_use | opf_no_vops); } - - - /* Clobber memory for asm ("" : : : "memory"); */ - for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link)) - if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0) - { - unsigned i; - bitmap_iterator bi; - - /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we - decided to group them). */ - if (global_var) - add_stmt_operand (&global_var, s_ann, opf_is_def); - else - EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi) - { - tree var = referenced_var (i); - add_stmt_operand (&var, s_ann, opf_is_def); - } - - /* Now clobber all addressables. */ - EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi) - { - tree var = referenced_var (i); - - /* Subvars are explicitly represented in this list, so - we don't need the original to be added to the clobber - ops, but the original *will* be in this list because - we keep the addressability of the original - variable up-to-date so we don't screw up the rest of - the backend. */ - if (var_can_have_subvars (var) - && get_subvars_for_var (var) != NULL) - continue; - - add_stmt_operand (&var, s_ann, opf_is_def); - } - - break; - } -} - -/* A subroutine of get_expr_operands to handle INDIRECT_REF, - ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF. */ - -static void -get_indirect_ref_operands (tree stmt, tree expr, int flags) -{ - tree *pptr = &TREE_OPERAND (expr, 0); - tree ptr = *pptr; - stmt_ann_t s_ann = stmt_ann (stmt); - - /* Stores into INDIRECT_REF operands are never killing definitions. */ - flags &= ~opf_kill_def; - - if (SSA_VAR_P (ptr)) + else { - struct ptr_info_def *pi = NULL; + size_t i, start = 0; - /* If PTR has flow-sensitive points-to information, use it. */ - if (TREE_CODE (ptr) == SSA_NAME - && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL - && pi->name_mem_tag) - { - /* PTR has its own memory tag. Use it. */ - add_stmt_operand (&pi->name_mem_tag, s_ann, flags); - } - else + if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL) { - /* If PTR is not an SSA_NAME or it doesn't have a name - tag, use its type memory tag. */ - var_ann_t v_ann; - - /* If we are emitting debugging dumps, display a warning if - PTR is an SSA_NAME with no flow-sensitive alias - information. That means that we may need to compute - aliasing again. */ - if (dump_file - && TREE_CODE (ptr) == SSA_NAME - && pi == NULL) - { - fprintf (dump_file, - "NOTE: no flow-sensitive alias info for "); - print_generic_expr (dump_file, ptr, dump_flags); - fprintf (dump_file, " in "); - print_generic_stmt (dump_file, stmt, dump_flags); - } - - if (TREE_CODE (ptr) == SSA_NAME) - ptr = SSA_NAME_VAR (ptr); - v_ann = var_ann (ptr); - if (v_ann->type_mem_tag) - add_stmt_operand (&v_ann->type_mem_tag, s_ann, flags); + get_expr_operands (stmt, gimple_op_ptr (stmt, 0), opf_def); + start = 1; } - } - - /* If a constant is used as a pointer, we can't generate a real - operand for it but we mark the statement volatile to prevent - optimizations from messing things up. 
*/ - else if (TREE_CODE (ptr) == INTEGER_CST) - { - if (s_ann) - s_ann->has_volatile_ops = true; - return; - } - /* Everything else *should* have been folded elsewhere, but users - are smarter than we in finding ways to write invalid code. We - cannot just assert here. If we were absolutely certain that we - do handle all valid cases, then we could just do nothing here. - That seems optimistic, so attempt to do something logical... */ - else if ((TREE_CODE (ptr) == PLUS_EXPR || TREE_CODE (ptr) == MINUS_EXPR) - && TREE_CODE (TREE_OPERAND (ptr, 0)) == ADDR_EXPR - && TREE_CODE (TREE_OPERAND (ptr, 1)) == INTEGER_CST) - { - /* Make sure we know the object is addressable. */ - pptr = &TREE_OPERAND (ptr, 0); - add_stmt_operand (pptr, s_ann, 0); + for (i = start; i < gimple_num_ops (stmt); i++) + get_expr_operands (stmt, gimple_op_ptr (stmt, i), opf_use); - /* Mark the object itself with a VUSE. */ - pptr = &TREE_OPERAND (*pptr, 0); - get_expr_operands (stmt, pptr, flags); - return; + /* Add call-clobbered operands, if needed. */ + if (code == GIMPLE_CALL) + maybe_add_call_vops (stmt); } - - /* Ok, this isn't even is_gimple_min_invariant. Something's broke. */ - else - gcc_unreachable (); - - /* Add a USE operand for the base pointer. */ - get_expr_operands (stmt, pptr, opf_none); } -/* A subroutine of get_expr_operands to handle CALL_EXPR. */ + +/* Create an operands cache for STMT. */ static void -get_call_expr_operands (tree stmt, tree expr) +build_ssa_operands (gimple stmt) { - tree op; - int call_flags = call_expr_flags (expr); - - /* If aliases have been computed already, add V_MAY_DEF or V_USE - operands for all the symbols that have been found to be - call-clobbered. - - Note that if aliases have not been computed, the global effects - of calls will not be included in the SSA web. This is fine - because no optimizer should run before aliases have been - computed. By not bothering with virtual operands for CALL_EXPRs - we avoid adding superfluous virtual operands, which can be a - significant compile time sink (See PR 15855). */ - if (aliases_computed_p - && !bitmap_empty_p (call_clobbered_vars) - && !(call_flags & ECF_NOVOPS)) - { - /* A 'pure' or a 'const' function never call-clobbers anything. - A 'noreturn' function might, but since we don't return anyway - there is no point in recording that. */ - if (TREE_SIDE_EFFECTS (expr) - && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN))) - add_call_clobber_ops (stmt); - else if (!(call_flags & ECF_CONST)) - add_call_read_ops (stmt); - } - - /* Find uses in the called function. */ - get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none); - - for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op)) - get_expr_operands (stmt, &TREE_VALUE (op), opf_none); - - get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none); + /* Initially assume that the statement has no volatile operands. */ + gimple_set_has_volatile_ops (stmt, false); + start_ssa_stmt_operands (); + parse_ssa_operands (stmt); + finalize_ssa_stmt_operands (stmt); } -/* Add *VAR_P to the appropriate operand array for INFO. FLAGS is as in - get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to - the statement's real operands, otherwise it is added to virtual - operands. */ +/* Releases the operands of STMT back to their freelists, and clears + the stmt operand lists. 
*/ -static void -add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags) +void +free_stmt_operands (gimple stmt) { - bool is_real_op; - tree var, sym; - var_ann_t v_ann; - - var = *var_p; - STRIP_NOPS (var); + def_optype_p defs = gimple_def_ops (stmt), last_def; + use_optype_p uses = gimple_use_ops (stmt), last_use; - /* If the operand is an ADDR_EXPR, add its operand to the list of - variables that have had their address taken in this statement. */ - if (TREE_CODE (var) == ADDR_EXPR) + if (defs) { - note_addressable (TREE_OPERAND (var, 0), s_ann); - return; + for (last_def = defs; last_def->next; last_def = last_def->next) + continue; + last_def->next = gimple_ssa_operands (cfun)->free_defs; + gimple_ssa_operands (cfun)->free_defs = defs; + gimple_set_def_ops (stmt, NULL); } - /* If the original variable is not a scalar, it will be added to the list - of virtual operands. In that case, use its base symbol as the virtual - variable representing it. */ - is_real_op = is_gimple_reg (var); - if (!is_real_op && !DECL_P (var)) - var = get_virtual_var (var); - - /* If VAR is not a variable that we care to optimize, do nothing. */ - if (var == NULL_TREE || !SSA_VAR_P (var)) - return; - - sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var); - v_ann = var_ann (sym); - - /* Mark statements with volatile operands. Optimizers should back - off from statements having volatile operands. */ - if (TREE_THIS_VOLATILE (sym) && s_ann) - s_ann->has_volatile_ops = true; - - /* If the variable cannot be modified and this is a V_MAY_DEF change - it into a VUSE. This happens when read-only variables are marked - call-clobbered and/or aliased to writeable variables. So we only - check that this only happens on stores, and not writes to GIMPLE - registers. - - FIXME: The C++ FE is emitting assignments in the IL stream for - read-only globals. This is wrong, but for the time being disable - this transformation on V_MUST_DEF operands (otherwise, we - mis-optimize SPEC2000's eon). */ - if ((flags & opf_is_def) - && !(flags & opf_kill_def) - && unmodifiable_var_p (var)) + if (uses) { - gcc_assert (!is_real_op); - flags &= ~opf_is_def; + for (last_use = uses; last_use->next; last_use = last_use->next) + delink_imm_use (USE_OP_PTR (last_use)); + delink_imm_use (USE_OP_PTR (last_use)); + last_use->next = gimple_ssa_operands (cfun)->free_uses; + gimple_ssa_operands (cfun)->free_uses = uses; + gimple_set_use_ops (stmt, NULL); } - if (is_real_op) - { - /* The variable is a GIMPLE register. Add it to real operands. */ - if (flags & opf_is_def) - append_def (var_p); - else - append_use (var_p); - } - else + if (gimple_has_mem_ops (stmt)) { - varray_type aliases; - - /* The variable is not a GIMPLE register. Add it (or its aliases) to - virtual operands, unless the caller has specifically requested - not to add virtual operands (used when adding operands inside an - ADDR_EXPR expression). */ - if (flags & opf_no_vops) - return; - - aliases = v_ann->may_aliases; - - if (aliases == NULL) - { - /* The variable is not aliased or it is an alias tag. */ - if (flags & opf_is_def) - { - if (flags & opf_kill_def) - { - /* Only regular variables or struct fields may get a - V_MUST_DEF operand. */ - gcc_assert (v_ann->mem_tag_kind == NOT_A_TAG - || v_ann->mem_tag_kind == STRUCT_FIELD); - /* V_MUST_DEF for non-aliased, non-GIMPLE register - variable definitions. */ - append_v_must_def (var); - } - else - { - /* Add a V_MAY_DEF for call-clobbered variables and - memory tags. 
*/ - append_v_may_def (var); - } - } - else - { - append_vuse (var); - if (s_ann && v_ann->is_alias_tag) - s_ann->makes_aliased_loads = 1; - } - } - else - { - size_t i; - - /* The variable is aliased. Add its aliases to the virtual - operands. */ - gcc_assert (VARRAY_ACTIVE_SIZE (aliases) != 0); - - if (flags & opf_is_def) - { - bool added_may_defs_p = false; - - /* If the variable is also an alias tag, add a virtual - operand for it, otherwise we will miss representing - references to the members of the variable's alias set. - This fixes the bug in gcc.c-torture/execute/20020503-1.c. */ - if (v_ann->is_alias_tag) - { - added_may_defs_p = true; - append_v_may_def (var); - } - - for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++) - { - /* While VAR may be modifiable, some of its aliases - may not be. If that's the case, we don't really - need to add them a V_MAY_DEF for them. */ - tree alias = VARRAY_TREE (aliases, i); - - if (unmodifiable_var_p (alias)) - append_vuse (alias); - else - { - append_v_may_def (alias); - added_may_defs_p = true; - } - } - - if (s_ann && added_may_defs_p) - s_ann->makes_aliased_stores = 1; - } - else - { - /* Similarly, append a virtual uses for VAR itself, when - it is an alias tag. */ - if (v_ann->is_alias_tag) - append_vuse (var); - - for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++) - append_vuse (VARRAY_TREE (aliases, i)); - - if (s_ann) - s_ann->makes_aliased_loads = 1; - } - } + gimple_set_vuse (stmt, NULL_TREE); + gimple_set_vdef (stmt, NULL_TREE); } } - -/* Record that VAR had its address taken in the statement with annotations - S_ANN. */ - -static void -note_addressable (tree var, stmt_ann_t s_ann) -{ - tree ref; - subvar_t svars; - HOST_WIDE_INT offset; - HOST_WIDE_INT size; - - if (!s_ann) - return; - - /* If this is a COMPONENT_REF, and we know exactly what it touches, we only - take the address of the subvariables it will touch. - Otherwise, we take the address of all the subvariables, plus the real - ones. */ - - if (var && TREE_CODE (var) == COMPONENT_REF - && (ref = okay_component_ref_for_subvars (var, &offset, &size))) - { - subvar_t sv; - svars = get_subvars_for_var (ref); - - if (s_ann->addresses_taken == NULL) - s_ann->addresses_taken = BITMAP_GGC_ALLOC (); - - for (sv = svars; sv; sv = sv->next) - { - if (overlap_subvar (offset, size, sv, NULL)) - bitmap_set_bit (s_ann->addresses_taken, var_ann (sv->var)->uid); - } - return; - } - - var = get_base_address (var); - if (var && SSA_VAR_P (var)) - { - if (s_ann->addresses_taken == NULL) - s_ann->addresses_taken = BITMAP_GGC_ALLOC (); - - - if (var_can_have_subvars (var) - && (svars = get_subvars_for_var (var))) - { - subvar_t sv; - for (sv = svars; sv; sv = sv->next) - bitmap_set_bit (s_ann->addresses_taken, var_ann (sv->var)->uid); - } - else - bitmap_set_bit (s_ann->addresses_taken, var_ann (var)->uid); - } -} -/* Add clobbering definitions for .GLOBAL_VAR or for each of the call - clobbered variables in the function. */ +/* Get the operands of statement STMT. */ -static void -add_call_clobber_ops (tree stmt) +void +update_stmt_operands (gimple stmt) { - int i; - unsigned u; - tree t; - bitmap_iterator bi; - stmt_ann_t s_ann = stmt_ann (stmt); - struct stmt_ann_d empty_ann; - - /* Functions that are not const, pure or never return may clobber - call-clobbered variables. */ - if (s_ann) - s_ann->makes_clobbering_call = true; - - /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases - for the heuristic used to decide whether to create .GLOBAL_VAR or not. 
*/ - if (global_var) - { - add_stmt_operand (&global_var, s_ann, opf_is_def); - return; - } - - /* If cache is valid, copy the elements into the build vectors. */ - if (ssa_call_clobbered_cache_valid) - { - /* Process the caches in reverse order so we are always inserting at - the head of the list. */ - for (i = VEC_length (tree, clobbered_vuses) - 1; i >=0; i--) - { - t = VEC_index (tree, clobbered_vuses, i); - gcc_assert (TREE_CODE (t) != SSA_NAME); - var_ann (t)->in_vuse_list = 1; - opbuild_append_virtual (&build_vuses, t); - } - for (i = VEC_length (tree, clobbered_v_may_defs) - 1; i >= 0; i--) - { - t = VEC_index (tree, clobbered_v_may_defs, i); - gcc_assert (TREE_CODE (t) != SSA_NAME); - var_ann (t)->in_v_may_def_list = 1; - opbuild_append_virtual (&build_v_may_defs, t); - } - if (s_ann) - { - s_ann->makes_aliased_loads = clobbered_aliased_loads; - s_ann->makes_aliased_stores = clobbered_aliased_stores; - } - return; - } - - memset (&empty_ann, 0, sizeof (struct stmt_ann_d)); - - /* Add a V_MAY_DEF operand for every call clobbered variable. */ - EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi) - { - tree var = referenced_var (u); - if (unmodifiable_var_p (var)) - add_stmt_operand (&var, &empty_ann, opf_none); - else - add_stmt_operand (&var, &empty_ann, opf_is_def); - } - - clobbered_aliased_loads = empty_ann.makes_aliased_loads; - clobbered_aliased_stores = empty_ann.makes_aliased_stores; - - /* Set the flags for a stmt's annotation. */ - if (s_ann) - { - s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads; - s_ann->makes_aliased_stores = empty_ann.makes_aliased_stores; - } - - /* Prepare empty cache vectors. */ - VEC_truncate (tree, clobbered_vuses, 0); - VEC_truncate (tree, clobbered_v_may_defs, 0); - - /* Now fill the clobbered cache with the values that have been found. */ - for (i = opbuild_first (&build_vuses); - i != OPBUILD_LAST; - i = opbuild_next (&build_vuses, i)) - VEC_safe_push (tree, heap, clobbered_vuses, - opbuild_elem_virtual (&build_vuses, i)); - - gcc_assert (opbuild_num_elems (&build_vuses) - == VEC_length (tree, clobbered_vuses)); + /* If update_stmt_operands is called before SSA is initialized, do + nothing. */ + if (!ssa_operands_active ()) + return; - for (i = opbuild_first (&build_v_may_defs); - i != OPBUILD_LAST; - i = opbuild_next (&build_v_may_defs, i)) - VEC_safe_push (tree, heap, clobbered_v_may_defs, - opbuild_elem_virtual (&build_v_may_defs, i)); + timevar_push (TV_TREE_OPS); - gcc_assert (opbuild_num_elems (&build_v_may_defs) - == VEC_length (tree, clobbered_v_may_defs)); + gcc_assert (gimple_modified_p (stmt)); + build_ssa_operands (stmt); + gimple_set_modified (stmt, false); - ssa_call_clobbered_cache_valid = true; + timevar_pop (TV_TREE_OPS); } -/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the - function. */ +/* Swap operands EXP0 and EXP1 in statement STMT. No attempt is done + to test the validity of the swap operation. */ -static void -add_call_read_ops (tree stmt) +void +swap_tree_operands (gimple stmt, tree *exp0, tree *exp1) { - int i; - unsigned u; - tree t; - bitmap_iterator bi; - stmt_ann_t s_ann = stmt_ann (stmt); - struct stmt_ann_d empty_ann; - - /* if the function is not pure, it may reference memory. Add - a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var - for the heuristic used to decide whether to create .GLOBAL_VAR. 
*/ - if (global_var) - { - add_stmt_operand (&global_var, s_ann, opf_none); - return; - } - - /* If cache is valid, copy the elements into the build vector. */ - if (ssa_ro_call_cache_valid) - { - for (i = VEC_length (tree, ro_call_vuses) - 1; i >=0 ; i--) - { - /* Process the caches in reverse order so we are always inserting at - the head of the list. */ - t = VEC_index (tree, ro_call_vuses, i); - gcc_assert (TREE_CODE (t) != SSA_NAME); - var_ann (t)->in_vuse_list = 1; - opbuild_append_virtual (&build_vuses, t); - } - if (s_ann) - s_ann->makes_aliased_loads = ro_call_aliased_loads; - return; - } - - memset (&empty_ann, 0, sizeof (struct stmt_ann_d)); + tree op0, op1; + op0 = *exp0; + op1 = *exp1; - /* Add a VUSE for each call-clobbered variable. */ - EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi) + /* If the operand cache is active, attempt to preserve the relative + positions of these two operands in their respective immediate use + lists. */ + if (ssa_operands_active () && op0 != op1) { - tree var = referenced_var (u); - add_stmt_operand (&var, &empty_ann, opf_none); - } - - ro_call_aliased_loads = empty_ann.makes_aliased_loads; - if (s_ann) - s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads; + use_optype_p use0, use1, ptr; + use0 = use1 = NULL; - /* Prepare empty cache vectors. */ - VEC_truncate (tree, ro_call_vuses, 0); + /* Find the 2 operands in the cache, if they are there. */ + for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next) + if (USE_OP_PTR (ptr)->use == exp0) + { + use0 = ptr; + break; + } - /* Now fill the clobbered cache with the values that have been found. */ - for (i = opbuild_first (&build_vuses); - i != OPBUILD_LAST; - i = opbuild_next (&build_vuses, i)) - VEC_safe_push (tree, heap, ro_call_vuses, - opbuild_elem_virtual (&build_vuses, i)); + for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next) + if (USE_OP_PTR (ptr)->use == exp1) + { + use1 = ptr; + break; + } - gcc_assert (opbuild_num_elems (&build_vuses) - == VEC_length (tree, ro_call_vuses)); + /* If both uses don't have operand entries, there isn't much we can do + at this point. Presumably we don't need to worry about it. */ + if (use0 && use1) + { + tree *tmp = USE_OP_PTR (use1)->use; + USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use; + USE_OP_PTR (use0)->use = tmp; + } + } - ssa_ro_call_cache_valid = true; + /* Now swap the data. */ + *exp0 = op1; + *exp1 = op0; } /* Scan the immediate_use list for VAR making sure its linked properly. - return RTUE iof there is a problem. */ + Return TRUE if there is a problem and emit an error message to F. */ bool verify_imm_links (FILE *f, tree var) @@ -2132,7 +1203,7 @@ verify_imm_links (FILE *f, tree var) { if (prev != ptr->prev) goto error; - + if (ptr->use == NULL) goto error; /* 2 roots, or SAFE guard node. */ else if (*(ptr->use) != var) @@ -2140,8 +1211,10 @@ verify_imm_links (FILE *f, tree var) prev = ptr; ptr = ptr->next; - /* Avoid infinite loops. */ - if (count++ > 30000) + + /* Avoid infinite loops. 50,000,000 uses probably indicates a + problem. */ + if (count++ > 50000000) goto error; } @@ -2163,12 +1236,12 @@ verify_imm_links (FILE *f, tree var) return false; error: - if (ptr->stmt && stmt_modified_p (ptr->stmt)) + if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt)) { - fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt); - print_generic_stmt (f, ptr->stmt, TDF_SLIM); + fprintf (f, " STMT MODIFIED. 
- <%p> ", (void *)ptr->loc.stmt); + print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM); } - fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr, + fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr, (void *)ptr->use); print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM); fprintf(f, "\n"); @@ -2198,14 +1271,18 @@ dump_immediate_uses_for (FILE *file, tree var) FOR_EACH_IMM_USE_FAST (use_p, iter, var) { - if (!is_gimple_reg (USE_FROM_PTR (use_p))) - print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS); + if (use_p->loc.stmt == NULL && use_p->use == NULL) + fprintf (file, "***end of stmt iterator marker***\n"); else - print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM); + if (!is_gimple_reg (USE_FROM_PTR (use_p))) + print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS); + else + print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM); } fprintf(file, "\n"); } + /* Dump all the immediate uses to FILE. */ void @@ -2233,6 +1310,7 @@ debug_immediate_uses (void) dump_immediate_uses (stderr); } + /* Dump def-use edges on stderr. */ void @@ -2240,4 +1318,29 @@ debug_immediate_uses_for (tree var) { dump_immediate_uses_for (stderr, var); } -#include "gt-tree-ssa-operands.h" + + +/* Unlink STMTs virtual definition from the IL by propagating its use. */ + +void +unlink_stmt_vdef (gimple stmt) +{ + use_operand_p use_p; + imm_use_iterator iter; + gimple use_stmt; + tree vdef = gimple_vdef (stmt); + + if (!vdef + || TREE_CODE (vdef) != SSA_NAME) + return; + + FOR_EACH_IMM_USE_STMT (use_stmt, iter, gimple_vdef (stmt)) + { + FOR_EACH_IMM_USE_ON_STMT (use_p, iter) + SET_USE (use_p, gimple_vuse (stmt)); + } + + if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vdef (stmt))) + SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vuse (stmt)) = 1; +} +