X-Git-Url: http://git.sourceforge.jp/view?a=blobdiff_plain;f=gcc%2Ftree-ssa-operands.c;h=7f76cbfe9c090d46ee1cf6dff4d2a5e60f5ca469;hb=467fa2ada0cbc1b7b81db7567540e8e46dbca920;hp=16f4a43dba32178067743d6c5c6036a7ca67af51;hpb=afcada6efe0e14683e087749cc108f06f4a1b7c1;p=pf3gnuchains%2Fgcc-fork.git

diff --git a/gcc/tree-ssa-operands.c b/gcc/tree-ssa-operands.c
index 16f4a43dba3..7f76cbfe9c0 100644
--- a/gcc/tree-ssa-operands.c
+++ b/gcc/tree-ssa-operands.c
@@ -1,5 +1,5 @@
 /* SSA operands management for trees.
-   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
+   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
    Free Software Foundation, Inc.
 
 This file is part of GCC.
@@ -25,50 +25,50 @@ along with GCC; see the file COPYING3.  If not see
 #include "tree.h"
 #include "flags.h"
 #include "function.h"
-#include "diagnostic.h"
+#include "tree-pretty-print.h"
+#include "gimple-pretty-print.h"
 #include "tree-flow.h"
 #include "tree-inline.h"
 #include "tree-pass.h"
 #include "ggc.h"
 #include "timevar.h"
-#include "toplev.h"
 #include "langhooks.h"
 #include "ipa-reference.h"
 
-/* This file contains the code required to manage the operands cache of the 
-   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt 
-   annotation.  This cache contains operands that will be of interest to 
-   optimizers and other passes wishing to manipulate the IL. 
+/* This file contains the code required to manage the operands cache of the
+   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
+   annotation.  This cache contains operands that will be of interest to
+   optimizers and other passes wishing to manipulate the IL.
 
-   The operand types are broken up into REAL and VIRTUAL operands.  The real 
-   operands are represented as pointers into the stmt's operand tree.  Thus 
+   The operand types are broken up into REAL and VIRTUAL operands.  The real
+   operands are represented as pointers into the stmt's operand tree.  Thus
    any manipulation of the real operands will be reflected in the actual tree.
-   Virtual operands are represented solely in the cache, although the base 
-   variable for the SSA_NAME may or may not occur in the stmt's tree. 
+   Virtual operands are represented solely in the cache, although the base
+   variable for the SSA_NAME may or may not occur in the stmt's tree.
    Manipulation of the virtual operands will not be reflected in the stmt tree.
 
-   The routines in this file are concerned with creating this operand cache 
+   The routines in this file are concerned with creating this operand cache
    from a stmt tree.
 
-   The operand tree is then parsed by the various get_* routines which look 
-   through the stmt tree for the occurrence of operands which may be of 
-   interest, and calls are made to the append_* routines whenever one is 
-   found.  There are 4 of these routines, each representing one of the 
+   The operand tree is then parsed by the various get_* routines which look
+   through the stmt tree for the occurrence of operands which may be of
+   interest, and calls are made to the append_* routines whenever one is
+   found.  There are 4 of these routines, each representing one of the
    4 types of operands: Defs, Uses, Virtual Uses, and Virtual May Defs.
 
-   The append_* routines check for duplication, and simply keep a list of 
+   The append_* routines check for duplication, and simply keep a list of
    unique objects for each operand type in the build_* extendable vectors.
 
-   Once the stmt tree is completely parsed, the finalize_ssa_operands() 
-   routine is called, which proceeds to perform the finalization routine 
+   Once the stmt tree is completely parsed, the finalize_ssa_operands()
+   routine is called, which proceeds to perform the finalization routine
    on each of the 4 operand vectors which have been built up.
 
-   If the stmt had a previous operand cache, the finalization routines 
-   attempt to match up the new operands with the old ones.  If it's a perfect 
-   match, the old vector is simply reused.  If it isn't a perfect match, then 
-   a new vector is created and the new operands are placed there.  For 
-   virtual operands, if the previous cache had an SSA_NAME version of a 
-   variable, and that same variable occurs in the same operands cache, then 
+   If the stmt had a previous operand cache, the finalization routines
+   attempt to match up the new operands with the old ones.  If it's a perfect
+   match, the old vector is simply reused.  If it isn't a perfect match, then
+   a new vector is created and the new operands are placed there.  For
+   virtual operands, if the previous cache had an SSA_NAME version of a
+   variable, and that same variable occurs in the same operands cache, then
    the new cache vector will also get the same SSA_NAME.
 
   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
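[Annotation: the build-then-finalize scheme the comment above describes can be sketched in isolation. The following toy model is an editorial illustration only, not GCC's implementation; every name in it (append_op, finalize_ops, struct stmt_ops) is invented, and the real code uses VEC(tree,heap) vectors and a per-operand-type finalizer rather than fixed arrays. It shows the two properties the comment calls out: append_* deduplicates into a scratch vector, and finalization reuses an unchanged cache.]

#include <stdbool.h>
#include <stdio.h>
#include <string.h>

#define MAX_OPS 16

/* Scratch vector, analogous to build_uses/build_defs.  */
static const char *build_ops[MAX_OPS];
static size_t n_build_ops;

/* Append OP, skipping duplicates (what the append_* routines do).  */
static void
append_op (const char *op)
{
  for (size_t i = 0; i < n_build_ops; i++)
    if (strcmp (build_ops[i], op) == 0)
      return;
  build_ops[n_build_ops++] = op;
}

/* Per-statement operand cache.  */
struct stmt_ops
{
  const char *ops[MAX_OPS];
  size_t n_ops;
};

/* Finalize: if the freshly built list matches the old cache exactly,
   leave the old one untouched; otherwise replace it.  Returns true
   when the existing cache was reused.  */
static bool
finalize_ops (struct stmt_ops *cache)
{
  bool same = cache->n_ops == n_build_ops;
  for (size_t i = 0; same && i < n_build_ops; i++)
    same = strcmp (cache->ops[i], build_ops[i]) == 0;

  if (!same)
    {
      memcpy (cache->ops, build_ops, n_build_ops * sizeof *build_ops);
      cache->n_ops = n_build_ops;
    }
  n_build_ops = 0;   /* Reset the scratch vector for the next stmt.  */
  return same;
}

int
main (void)
{
  struct stmt_ops cache = { { 0 }, 0 };

  /* First "parse": a = b + b collects the use 'b' once, despite the dup.  */
  append_op ("b");
  append_op ("b");
  printf ("reused: %d, n_ops: %zu\n", finalize_ops (&cache), cache.n_ops);

  /* Re-parsing the unchanged stmt reuses the existing cache.  */
  append_op ("b");
  printf ("reused: %d, n_ops: %zu\n", finalize_ops (&cache), cache.n_ops);
  return 0;
}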
@@ -78,7 +78,7 @@ along with GCC; see the file COPYING3.  If not see
 
 /* Structure storing statistics on how many call clobbers we have, and
    how many were avoided.  */
-static struct 
+static struct
 {
   /* Number of call-clobbered ops we attempt to add to calls in
      add_call_clobbered_mem_symbols.  */
@@ -90,7 +90,7 @@ static struct
 
   /* Number of reads (VUSEs) avoided by using not_read information.  */
   unsigned int static_read_clobbers_avoided;
-  
+
   /* Number of write-clobbers avoided because the variable can't escape to
      this call.  */
   unsigned int unescapable_clobbers_avoided;
@@ -109,7 +109,7 @@ static struct
 /* By default, operands are loaded.  */
 #define opf_use	0
 
-/* Operand is the target of an assignment expression or a 
+/* Operand is the target of an assignment expression or a
    call-clobbered variable.  */
 #define opf_def 	(1 << 0)
 
@@ -126,6 +126,12 @@ static struct
    clobbering sites like function calls or ASM_EXPRs.  */
 #define opf_implicit	(1 << 2)
 
+/* Operand is in a place where address-taken does not imply addressable.  */
+#define opf_non_addressable	(1 << 3)
+
+/* Operand is in a place where opf_non_addressable does not apply.  */
+#define opf_not_non_addressable (1 << 4)
+
 /* Array for building all the def operands.  */
 static VEC(tree,heap) *build_defs;
 
@@ -138,7 +144,7 @@ static tree build_vdef;
 /* The built VUSE operand.  */
 static tree build_vuse;
 
-/* Bitmap obstack for our datastructures that needs to survive across 
+/* Bitmap obstack for our data structures that need to survive across
    compilations of multiple functions.  */
 static bitmap_obstack operands_bitmap_obstack;
 
@@ -174,7 +180,7 @@ ssa_operands_active (void)
 
   return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
 }
 
-
+
 /* Create the VOP variable, an artificial global variable to act as a
    representative of all of the virtual operands FUD chain.  */
 
@@ -208,7 +214,7 @@ create_vop_var (void)
    In 1k we can fit 25 use operands (or 63 def operands) on a host with
    8 byte pointers; that would be 10 statements each with 1 def and 2
    uses.  */
-
+
 #define OP_SIZE_INIT	0
 #define OP_SIZE_1	(1024 - sizeof (void *))
 #define OP_SIZE_2	(1024 * 4 - sizeof (void *))
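[Annotation: a quick sanity check of the numbers in the comment above, as a worked example under the assumption that this era's use node is five pointers (the chain link plus the four-pointer ssa_use_operand_t) and a def node is two pointers. With 8-byte pointers a chunk leaves 1024 - 8 = 1016 usable bytes, so 1016 / 40 = 25 use nodes or 1016 / 16 = 63 def nodes; 10 statements with 1 def and 2 uses each take 10 * (16 + 2 * 40) = 960 bytes, which indeed fits.]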
@@ -289,7 +295,7 @@ fini_ssa_operands (void)
 
 /* Return memory for an operand of size SIZE.  */
 
-
+
 static inline void *
 ssa_operand_alloc (unsigned size)
 {
@@ -319,9 +325,10 @@ ssa_operand_alloc (unsigned size)
 	  gcc_unreachable ();
 	}
 
-      ptr = (struct ssa_operand_memory_d *) 
-	      ggc_alloc (sizeof (void *)
-			 + gimple_ssa_operands (cfun)->ssa_operand_mem_size);
+
+      ptr = ggc_alloc_ssa_operand_memory_d (sizeof (void *)
+			+ gimple_ssa_operands (cfun)->ssa_operand_mem_size);
+
       ptr->next = gimple_ssa_operands (cfun)->operand_memory;
       gimple_ssa_operands (cfun)->operand_memory = ptr;
       gimple_ssa_operands (cfun)->operand_memory_index = 0;
@@ -374,7 +381,7 @@ alloc_use (void)
 
 /* Adds OP to the list of defs after LAST.  */
 
-static inline def_optype_p 
+static inline def_optype_p
 add_def_op (tree *op, def_optype_p last)
 {
   def_optype_p new_def;
@@ -529,8 +536,8 @@ finalize_ssa_uses (gimple stmt)
 
   /* Now create nodes for all the new uses.  */
   for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
-    last = add_use_op (stmt, 
-		       (tree *) VEC_index (tree, build_uses, new_i), 
+    last = add_use_op (stmt,
+		       (tree *) VEC_index (tree, build_uses, new_i),
 		       last);
 
   /* Now set the stmt's operands.  */
@@ -552,7 +559,7 @@ cleanup_build_arrays (void)
 
 /* Finalize all the build vectors, fill the new ones into INFO.  */
 
-
+
 static inline void
 finalize_ssa_stmt_operands (gimple stmt)
 {
@@ -653,13 +660,11 @@ static void
 add_stmt_operand (tree *var_p, gimple stmt, int flags)
 {
   tree var, sym;
-  var_ann_t v_ann;
 
   gcc_assert (SSA_VAR_P (*var_p));
 
   var = *var_p;
   sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
-  v_ann = var_ann (sym);
 
   /* Mark statements with volatile operands.  */
   if (TREE_THIS_VOLATILE (sym))
@@ -693,17 +698,23 @@ mark_address_taken (tree ref)
      be referenced using pointer arithmetic.  See PR 21407 and the
      ensuing mailing list discussion.  */
   var = get_base_address (ref);
-  if (var && DECL_P (var))
-    TREE_ADDRESSABLE (var) = 1;
+  if (var)
+    {
+      if (DECL_P (var))
+	TREE_ADDRESSABLE (var) = 1;
+      else if (TREE_CODE (var) == MEM_REF
+	       && TREE_CODE (TREE_OPERAND (var, 0)) == ADDR_EXPR
+	       && DECL_P (TREE_OPERAND (TREE_OPERAND (var, 0), 0)))
+	TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (var, 0), 0)) = 1;
+    }
 }
 
 
-/* A subroutine of get_expr_operands to handle INDIRECT_REF,
-   ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.
+/* A subroutine of get_expr_operands to handle MEM_REF.
 
-   STMT is the statement being processed, EXPR is the INDIRECT_REF
+   STMT is the statement being processed, EXPR is the MEM_REF
    that got us here.
-   
+
    FLAGS is as in get_expr_operands.
 
    RECURSE_ON_BASE should be set to true if we want to continue
@@ -725,7 +736,8 @@ get_indirect_ref_operands (gimple stmt, tree expr, int flags,
   /* If requested, add a USE operand for the base pointer.  */
   if (recurse_on_base)
     get_expr_operands (stmt, pptr,
-		       opf_use | (flags & opf_no_vops));
+		       opf_non_addressable | opf_use
+		       | (flags & (opf_no_vops|opf_not_non_addressable)));
 }
 
 
@@ -734,12 +746,13 @@ get_indirect_ref_operands (gimple stmt, tree expr, int flags,
 static void
 get_tmr_operands (gimple stmt, tree expr, int flags)
 {
+  if (TREE_THIS_VOLATILE (expr))
+    gimple_set_has_volatile_ops (stmt, true);
+
   /* First record the real operands.  */
   get_expr_operands (stmt, &TMR_BASE (expr), opf_use | (flags & opf_no_vops));
   get_expr_operands (stmt, &TMR_INDEX (expr), opf_use | (flags & opf_no_vops));
-
-  if (TMR_SYMBOL (expr))
-    mark_address_taken (TMR_SYMBOL (expr));
+  get_expr_operands (stmt, &TMR_INDEX2 (expr), opf_use | (flags & opf_no_vops));
 
   add_virtual_operand (stmt, flags);
 }
@@ -758,9 +771,9 @@ maybe_add_call_vops (gimple stmt)
      call-clobbered.  */
   if (!(call_flags & ECF_NOVOPS))
     {
-      /* A 'pure' or a 'const' function never call-clobbers anything. 
-	 A 'noreturn' function might, but since we don't return anyway 
-	 there is no point in recording that.  */ 
+      /* A 'pure' or a 'const' function never call-clobbers anything.
+	 A 'noreturn' function might, but since we don't return anyway
+	 there is no point in recording that.  */
       if (!(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
 	add_virtual_operand (stmt, opf_def);
       else if (!(call_flags & ECF_CONST))
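[Annotation: the clobbering decision described in the hunk above is compact enough to restate. A minimal sketch with invented names, using plain ints as stand-ins for GCC's ECF_* call flags; this is not the actual maybe_add_call_vops, which operates on gimple statements.]

#include <stdio.h>

/* Stand-ins for the ECF_* flags named in the hunk above.  */
enum { F_CONST = 1, F_PURE = 2, F_NORETURN = 4, F_NOVOPS = 8 };

/* 'd': the call gets a virtual definition (it may clobber memory);
   'u': only a virtual use (it may still read memory); '-': no vops.  */
static char
call_vop_kind (int flags)
{
  if (flags & F_NOVOPS)
    return '-';
  if (!(flags & (F_PURE | F_CONST | F_NORETURN)))
    return 'd';			/* Ordinary call: clobbers memory.  */
  if (!(flags & F_CONST))
    return 'u';			/* pure/noreturn: still reads memory.  */
  return '-';			/* const: touches no memory at all.  */
}

int
main (void)
{
  printf ("%c %c %c %c\n",
	  call_vop_kind (0),		/* d */
	  call_vop_kind (F_PURE),	/* u */
	  call_vop_kind (F_CONST),	/* - */
	  call_vop_kind (F_NORETURN));	/* u */
  return 0;
}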
@@ -797,13 +810,9 @@ get_asm_expr_operands (gimple stmt)
       /* Memory operands are addressable.  Note that STMT needs the
 	 address of this operand.  */
       if (!allows_reg && allows_mem)
-	{
-	  tree t = get_base_address (TREE_VALUE (link));
-	  if (t && DECL_P (t))
-	    mark_address_taken (t);
-	}
+	mark_address_taken (TREE_VALUE (link));
 
-      get_expr_operands (stmt, &TREE_VALUE (link), opf_def);
+      get_expr_operands (stmt, &TREE_VALUE (link), opf_def | opf_not_non_addressable);
     }
 
   /* Gather all input operands.  */
@@ -817,25 +826,14 @@ get_asm_expr_operands (gimple stmt)
       /* Memory operands are addressable.  Note that STMT needs the
 	 address of this operand.  */
       if (!allows_reg && allows_mem)
-	{
-	  tree t = get_base_address (TREE_VALUE (link));
-	  if (t && DECL_P (t))
-	    mark_address_taken (t);
-	}
+	mark_address_taken (TREE_VALUE (link));
 
-      get_expr_operands (stmt, &TREE_VALUE (link), 0);
+      get_expr_operands (stmt, &TREE_VALUE (link), opf_not_non_addressable);
     }
 
   /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
-  for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
-    {
-      tree link = gimple_asm_clobber_op (stmt, i);
-      if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
-	{
-	  add_virtual_operand (stmt, opf_def);
-	  break;
-	}
-    }
+  if (gimple_asm_clobbers_memory_p (stmt))
+    add_virtual_operand (stmt, opf_def);
 }
 
 
@@ -867,7 +865,9 @@ get_expr_operands (gimple stmt, tree *expr_p, int flags)
 	 reference to it, but the fact that the statement takes its
 	 address will be of interest to some passes (e.g. alias
 	 resolution).  */
-      if (!is_gimple_debug (stmt))
+      if ((!(flags & opf_non_addressable)
+	   || (flags & opf_not_non_addressable))
+	  && !is_gimple_debug (stmt))
 	mark_address_taken (TREE_OPERAND (expr, 0));
 
       /* If the address is invariant, there may be no interesting
@@ -881,7 +881,8 @@ get_expr_operands (gimple stmt, tree *expr_p, int flags)
 	 here are ARRAY_REF indices which will always be real operands
 	 (GIMPLE does not allow non-registers as array indices).  */
       flags |= opf_no_vops;
-      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
+      get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
+			 flags | opf_not_non_addressable);
       return;
 
     case SSA_NAME:
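[Annotation: the interplay of the two new flags is the crux of this change — a MEM_REF base is a "non-addressable" position, while an asm operand or an outer construct can re-enable marking. A minimal sketch of just that predicate, with invented names; the flag values mirror the opf_* defines added earlier in the patch.]

#include <assert.h>

#define NON_ADDRESSABLE      (1 << 3)	/* opf_non_addressable      */
#define NOT_NON_ADDRESSABLE  (1 << 4)	/* opf_not_non_addressable  */

/* Mirrors the condition added to the ADDR_EXPR case above: mark the
   base as addressable unless we sit in a non-addressable position
   that no outer construct has overridden.  */
static int
should_mark_address_taken (int flags)
{
  return !(flags & NON_ADDRESSABLE) || (flags & NOT_NON_ADDRESSABLE);
}

int
main (void)
{
  assert (should_mark_address_taken (0));			 /* plain &x      */
  assert (!should_mark_address_taken (NON_ADDRESSABLE));	 /* MEM_REF base  */
  assert (should_mark_address_taken (NON_ADDRESSABLE
				     | NOT_NON_ADDRESSABLE));	 /* asm override  */
  return 0;
}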
@@ -894,12 +895,11 @@ get_expr_operands (gimple stmt, tree *expr_p, int flags)
       add_stmt_operand (expr_p, stmt, flags);
       return;
 
-    case MISALIGNED_INDIRECT_REF:
-      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
-      /* fall through */
+    case DEBUG_EXPR_DECL:
+      gcc_assert (gimple_debug_bind_p (stmt));
+      return;
 
-    case ALIGN_INDIRECT_REF:
-    case INDIRECT_REF:
+    case MEM_REF:
       get_indirect_ref_operands (stmt, expr, flags, true);
       return;
 
@@ -917,7 +917,7 @@ get_expr_operands (gimple stmt, tree *expr_p, int flags)
 	gimple_set_has_volatile_ops (stmt, true);
 
       get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
-      
+
       if (code == COMPONENT_REF)
 	{
 	  if (TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
@@ -989,19 +989,20 @@ get_expr_operands (gimple stmt, tree *expr_p, int flags)
 
     case DOT_PROD_EXPR:
     case REALIGN_LOAD_EXPR:
+    case WIDEN_MULT_PLUS_EXPR:
+    case WIDEN_MULT_MINUS_EXPR:
+    case FMA_EXPR:
       {
 	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
-	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags); 
-	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags); 
-	return; 
+	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
+	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
+	return;
       }
 
     case FUNCTION_DECL:
     case LABEL_DECL:
     case CONST_DECL:
    case CASE_LABEL_EXPR:
-    case FILTER_EXPR:
-    case EXC_PTR_EXPR:
      /* Expressions that make no memory references.  */
      return;
 
@@ -1057,6 +1058,9 @@ parse_ssa_operands (gimple stmt)
       /* Add call-clobbered operands, if needed.  */
       if (code == GIMPLE_CALL)
 	maybe_add_call_vops (stmt);
+
+      if (code == GIMPLE_RETURN)
+	append_vuse (gimple_vop (cfun));
     }
 }
 
@@ -1123,6 +1127,12 @@ update_stmt_operands (gimple stmt)
 
   timevar_push (TV_TREE_OPS);
 
+  /* If the stmt is a noreturn call, queue it to be processed by
+     split_bbs_on_noreturn_calls during cfg cleanup.  */
+  if (is_gimple_call (stmt)
+      && gimple_call_noreturn_p (stmt))
+    VEC_safe_push (gimple, gc, MODIFIED_NORETURN_CALLS (cfun), stmt);
+
   gcc_assert (gimple_modified_p (stmt));
   build_ssa_operands (stmt);
   gimple_set_modified (stmt, false);
@@ -1183,7 +1193,7 @@ swap_tree_operands (gimple stmt, tree *exp0, tree *exp1)
 /* Scan the immediate_use list for VAR, making sure it's linked properly.
    Return TRUE if there is a problem and emit an error message to F.  */
 
-bool
+DEBUG_FUNCTION bool
 verify_imm_links (FILE *f, tree var)
 {
   use_operand_p ptr, prev, list;
@@ -1206,7 +1216,7 @@ verify_imm_links (FILE *f, tree var)
     {
       if (prev != ptr->prev)
 	goto error;
-      
+
       if (ptr->use == NULL)
 	goto error; /* 2 roots, or SAFE guard node.  */
       else if (*(ptr->use) != var)
@@ -1244,7 +1254,7 @@ verify_imm_links (FILE *f, tree var)
       fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt);
       print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
     }
-  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr, 
+  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
 	   (void *)ptr->use);
   print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
   fprintf(f, "\n");
@@ -1307,7 +1317,7 @@ dump_immediate_uses (FILE *file)
 
 /* Dump def-use edges on stderr.  */
 
-void
+DEBUG_FUNCTION void
 debug_immediate_uses (void)
 {
   dump_immediate_uses (stderr);
@@ -1316,7 +1326,7 @@ debug_immediate_uses (void)
 
 /* Dump def-use edges on stderr.  */
 
-void
+DEBUG_FUNCTION void
 debug_immediate_uses_for (tree var)
 {
   dump_immediate_uses_for (stderr, var);
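[Annotation: verify_imm_links above checks the invariant that keeps immediate uses cheap to maintain — every use of an SSA name lives on a circular doubly-linked list headed by a sentinel whose use pointer is NULL. A self-contained toy version of that structure and check, with invented names and none of GCC's tree or use_operand_p machinery.]

#include <stdbool.h>

struct use_node
{
  struct use_node *prev, *next;
};

/* Insert NODE right after the list ROOT (the sentinel).  */
static void
link_use (struct use_node *root, struct use_node *node)
{
  node->prev = root;
  node->next = root->next;
  root->next->prev = node;
  root->next = node;
}

/* Return true if the circular list rooted at ROOT is well formed,
   giving up after LIMIT nodes to catch cycles that skip the root.  */
static bool
verify_links (struct use_node *root, unsigned limit)
{
  struct use_node *prev = root;
  for (struct use_node *p = root->next; p != root; prev = p, p = p->next)
    if (p->prev != prev || limit-- == 0)
      return false;
  return root->prev == prev;
}

int
main (void)
{
  struct use_node root = { &root, &root };  /* Empty list: sentinel only.  */
  struct use_node a, b;

  link_use (&root, &a);
  link_use (&root, &b);
  return verify_links (&root, 100) ? 0 : 1;
}

The real checker additionally cross-checks each node's use pointer against the SSA name and dumps the offending statement; the sketch keeps only the link discipline.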