X-Git-Url: http://git.sourceforge.jp/view?a=blobdiff_plain;f=gcc%2Ftree-sra.c;h=80c4ca744e050c5660683a7924dff1b7e37d2065;hb=f67bd9ad79bc7ee36b0245d2f74e1685c3a471b6;hp=83659ab0e4e09043e2bbe7a9dc0fbb0d51699c9f;hpb=a55dc2cd86f718c1ad76ce7d418c9a8a44b2440b;p=pf3gnuchains%2Fgcc-fork.git diff --git a/gcc/tree-sra.c b/gcc/tree-sra.c index 83659ab0e4e..80c4ca744e0 100644 --- a/gcc/tree-sra.c +++ b/gcc/tree-sra.c @@ -1,7 +1,7 @@ /* Scalar Replacement of Aggregates (SRA) converts some structure references into scalar references, exposing them to the scalar optimizers. - Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc. + Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc. Contributed by Diego Novillo This file is part of GCC. @@ -75,6 +75,9 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA */ +/* The set of todo flags to return from tree_sra. */ +static unsigned int todoflags; + /* The set of aggregate variables that are candidates for scalarization. */ static bitmap sra_candidates; @@ -86,20 +89,22 @@ static bitmap needs_copy_in; static bitmap sra_type_decomp_cache; static bitmap sra_type_inst_cache; -/* One of these structures is created for each candidate aggregate - and each (accessed) member of such an aggregate. */ +/* One of these structures is created for each candidate aggregate and + each (accessed) member or group of members of such an aggregate. */ struct sra_elt { /* A tree of the elements. Used when we want to traverse everything. */ struct sra_elt *parent; + struct sra_elt *groups; struct sra_elt *children; struct sra_elt *sibling; /* If this element is a root, then this is the VAR_DECL. If this is a sub-element, this is some token used to identify the reference. In the case of COMPONENT_REF, this is the FIELD_DECL. In the case - of an ARRAY_REF, this is the (constant) index. In the case of a - complex number, this is a zero or one. */ + of an ARRAY_REF, this is the (constant) index. In the case of an + ARRAY_RANGE_REF, this is the (constant) RANGE_EXPR. In the case + of a complex number, this is a zero or one. */ tree element; /* The type of the element. */ @@ -119,6 +124,9 @@ struct sra_elt /* True if TYPE is scalar. */ bool is_scalar; + /* True if this element is a group of members of its parent. */ + bool is_group; + /* True if we saw something about this element that prevents scalarization, such as non-constant indexing. */ bool cannot_scalarize; @@ -127,10 +135,55 @@ struct sra_elt should happen via memcpy and not per-element. */ bool use_block_copy; + /* True if everything under this element has been marked TREE_NO_WARNING. */ + bool all_no_warning; + /* A flag for use with/after random access traversals. */ bool visited; }; +#define IS_ELEMENT_FOR_GROUP(ELEMENT) (TREE_CODE (ELEMENT) == RANGE_EXPR) + +#define FOR_EACH_ACTUAL_CHILD(CHILD, ELT) \ + for ((CHILD) = (ELT)->is_group \ + ? next_child_for_group (NULL, (ELT)) \ + : (ELT)->children; \ + (CHILD); \ + (CHILD) = (ELT)->is_group \ + ? next_child_for_group ((CHILD), (ELT)) \ + : (CHILD)->sibling) + +/* Helper function for above macro. Return next child in group. */ +static struct sra_elt * +next_child_for_group (struct sra_elt *child, struct sra_elt *group) +{ + gcc_assert (group->is_group); + + /* Find the next child in the parent. */ + if (child) + child = child->sibling; + else + child = group->parent->children; + + /* Skip siblings that do not belong to the group. 
*/ + while (child) + { + tree g_elt = group->element; + if (TREE_CODE (g_elt) == RANGE_EXPR) + { + if (!tree_int_cst_lt (child->element, TREE_OPERAND (g_elt, 0)) + && !tree_int_cst_lt (TREE_OPERAND (g_elt, 1), child->element)) + break; + } + else + gcc_unreachable (); + + child = child->sibling; + } + + return child; +} + /* Random access to the child of a parent is performed by hashing. This prevents quadratic behavior, and allows SRA to function reasonably on larger records. */ @@ -162,7 +215,7 @@ is_sra_scalar_type (tree type) enum tree_code code = TREE_CODE (type); return (code == INTEGER_TYPE || code == REAL_TYPE || code == VECTOR_TYPE || code == ENUMERAL_TYPE || code == BOOLEAN_TYPE - || code == CHAR_TYPE || code == POINTER_TYPE || code == OFFSET_TYPE + || code == POINTER_TYPE || code == OFFSET_TYPE || code == REFERENCE_TYPE); } @@ -172,8 +225,8 @@ is_sra_scalar_type (tree type) instantiated, just that if we decide to break up the type into separate pieces that it can be done. */ -static bool -type_can_be_decomposed_p (tree type) +bool +sra_type_can_be_decomposed_p (tree type) { unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2; tree t; @@ -275,7 +328,7 @@ decl_can_be_decomposed_p (tree var) } /* We must be able to decompose the variable's type. */ - if (!type_can_be_decomposed_p (TREE_TYPE (var))) + if (!sra_type_can_be_decomposed_p (TREE_TYPE (var))) { if (dump_file && (dump_flags & TDF_DETAILS)) { @@ -296,7 +349,7 @@ type_can_instantiate_all_elements (tree type) { if (is_sra_scalar_type (type)) return true; - if (!type_can_be_decomposed_p (type)) + if (!sra_type_can_be_decomposed_p (type)) return false; switch (TREE_CODE (type)) @@ -346,7 +399,11 @@ can_completely_scalarize_p (struct sra_elt *elt) if (elt->cannot_scalarize) return false; - for (c = elt->children; c ; c = c->sibling) + for (c = elt->children; c; c = c->sibling) + if (!can_completely_scalarize_p (c)) + return false; + + for (c = elt->groups; c; c = c->sibling) if (!can_completely_scalarize_p (c)) return false; @@ -374,6 +431,11 @@ sra_hash_tree (tree t) h = TREE_INT_CST_LOW (t) ^ TREE_INT_CST_HIGH (t); break; + case RANGE_EXPR: + h = iterative_hash_expr (TREE_OPERAND (t, 0), 0); + h = iterative_hash_expr (TREE_OPERAND (t, 1), h); + break; + case FIELD_DECL: /* We can have types that are compatible, but have different member lists, so we can't hash fields by ID. Use offsets instead. */ @@ -441,6 +503,11 @@ sra_elt_eq (const void *x, const void *y) /* Integers are not pointer unique, so compare their values. */ return tree_int_cst_equal (ae, be); + case RANGE_EXPR: + return + tree_int_cst_equal (TREE_OPERAND (ae, 0), TREE_OPERAND (be, 0)) + && tree_int_cst_equal (TREE_OPERAND (ae, 1), TREE_OPERAND (be, 1)); + case FIELD_DECL: /* Fields are unique within a record, but not between compatible records. */ @@ -464,7 +531,10 @@ lookup_element (struct sra_elt *parent, tree child, tree type, struct sra_elt **slot; struct sra_elt *elt; - dummy.parent = parent; + if (parent) + dummy.parent = parent->is_group ? 
parent->parent : parent; + else + dummy.parent = NULL; dummy.element = child; slot = (struct sra_elt **) htab_find_slot (sra_map, &dummy, insert); @@ -484,8 +554,17 @@ lookup_element (struct sra_elt *parent, tree child, tree type, if (parent) { - elt->sibling = parent->children; - parent->children = elt; + if (IS_ELEMENT_FOR_GROUP (elt->element)) + { + elt->is_group = true; + elt->sibling = parent->groups; + parent->groups = elt; + } + else + { + elt->sibling = parent->children; + parent->children = elt; + } } /* If this is a parameter, then if we want to scalarize, we have @@ -500,42 +579,6 @@ lookup_element (struct sra_elt *parent, tree child, tree type, return elt; } -/* Return true if the ARRAY_REF in EXPR is a constant, in bounds access. */ - -static bool -is_valid_const_index (tree expr) -{ - tree dom, t, index = TREE_OPERAND (expr, 1); - - if (TREE_CODE (index) != INTEGER_CST) - return false; - - /* Watch out for stupid user tricks, indexing outside the array. - - Careful, we're not called only on scalarizable types, so do not - assume constant array bounds. We needn't do anything with such - cases, since they'll be referring to objects that we should have - already rejected for scalarization, so returning false is fine. */ - - dom = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (expr, 0))); - if (dom == NULL) - return false; - - t = TYPE_MIN_VALUE (dom); - if (!t || TREE_CODE (t) != INTEGER_CST) - return false; - if (tree_int_cst_lt (index, t)) - return false; - - t = TYPE_MAX_VALUE (dom); - if (!t || TREE_CODE (t) != INTEGER_CST) - return false; - if (tree_int_cst_lt (t, index)) - return false; - - return true; -} - /* Create or return the SRA_ELT structure for EXPR if the expression refers to a scalarizable variable. */ @@ -555,13 +598,25 @@ maybe_lookup_element_for_expr (tree expr) return NULL; case ARRAY_REF: - /* We can't scalarize variable array indicies. */ - if (is_valid_const_index (expr)) + /* We can't scalarize variable array indices. */ + if (in_array_bounds_p (expr)) child = TREE_OPERAND (expr, 1); else return NULL; break; + case ARRAY_RANGE_REF: + /* We can't scalarize variable array indices. */ + if (range_in_array_bounds_p (expr)) + { + tree domain = TYPE_DOMAIN (TREE_TYPE (expr)); + child = build2 (RANGE_EXPR, integer_type_node, + TYPE_MIN_VALUE (domain), TYPE_MAX_VALUE (domain)); + } + else + return NULL; + break; + case COMPONENT_REF: /* Don't look through unions. */ if (TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) != RECORD_TYPE) @@ -598,9 +653,10 @@ struct sra_walk_fns /* Invoked when ELT is required as a unit. Note that ELT might refer to a leaf node, in which case this is a simple scalar reference. *EXPR_P points to the location of the expression. IS_OUTPUT is true if this - is a left-hand-side reference. */ + is a left-hand-side reference. USE_ALL is true if we saw something we + couldn't quite identify and had to force the use of the entire object. */ void (*use) (struct sra_elt *elt, tree *expr_p, - block_stmt_iterator *bsi, bool is_output); + block_stmt_iterator *bsi, bool is_output, bool use_all); /* Invoked when we have a copy between two scalarizable references. 
*/ void (*copy) (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt, @@ -654,6 +710,7 @@ sra_walk_expr (tree *expr_p, block_stmt_iterator *bsi, bool is_output, tree expr = *expr_p; tree inner = expr; bool disable_scalarization = false; + bool use_all_p = false; /* We're looking to collect a reference expression between EXPR and INNER, such that INNER is a scalarizable decl and all other nodes through EXPR @@ -674,7 +731,7 @@ sra_walk_expr (tree *expr_p, block_stmt_iterator *bsi, bool is_output, if (disable_scalarization) elt->cannot_scalarize = true; else - fns->use (elt, expr_p, bsi, is_output); + fns->use (elt, expr_p, bsi, is_output, use_all_p); } return; @@ -689,7 +746,7 @@ sra_walk_expr (tree *expr_p, block_stmt_iterator *bsi, bool is_output, the effort. */ /* ??? Hack. Figure out how to push this into the scan routines without duplicating too much code. */ - if (!is_valid_const_index (inner)) + if (!in_array_bounds_p (inner)) { disable_scalarization = true; goto use_all; @@ -701,6 +758,18 @@ sra_walk_expr (tree *expr_p, block_stmt_iterator *bsi, bool is_output, inner = TREE_OPERAND (inner, 0); break; + case ARRAY_RANGE_REF: + if (!range_in_array_bounds_p (inner)) + { + disable_scalarization = true; + goto use_all; + } + /* ??? See above non-constant bounds and stride . */ + if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3)) + goto use_all; + inner = TREE_OPERAND (inner, 0); + break; + case COMPONENT_REF: /* A reference to a union member constitutes a reference to the entire union. */ @@ -723,11 +792,6 @@ sra_walk_expr (tree *expr_p, block_stmt_iterator *bsi, bool is_output, complete outer element, to which walk_tree will bring us next. */ goto use_all; - case ARRAY_RANGE_REF: - /* Similarly, an subrange reference is used to modify indexing. Which - means that the canonical element names that we have won't work. */ - goto use_all; - case VIEW_CONVERT_EXPR: case NOP_EXPR: /* Similarly, a view/nop explicitly wants to look at an object in a @@ -742,6 +806,7 @@ sra_walk_expr (tree *expr_p, block_stmt_iterator *bsi, bool is_output, use_all: expr_p = &TREE_OPERAND (inner, 0); inner = expr = *expr_p; + use_all_p = true; break; default: @@ -813,7 +878,7 @@ sra_walk_modify_expr (tree expr, block_stmt_iterator *bsi, if (!rhs_elt->is_scalar) fns->ldst (rhs_elt, lhs, bsi, false); else - fns->use (rhs_elt, &TREE_OPERAND (expr, 1), bsi, false); + fns->use (rhs_elt, &TREE_OPERAND (expr, 1), bsi, false, false); } /* If it isn't scalarizable, there may be scalarizable variables within, so @@ -859,7 +924,7 @@ sra_walk_modify_expr (tree expr, block_stmt_iterator *bsi, /* Otherwise we're being used in some context that requires the aggregate to be seen as a whole. Invoke USE. */ else - fns->use (lhs_elt, &TREE_OPERAND (expr, 0), bsi, true); + fns->use (lhs_elt, &TREE_OPERAND (expr, 0), bsi, true, false); } /* Similarly to above, LHS_ELT being null only means that the LHS as a @@ -967,7 +1032,7 @@ find_candidates_for_sra (void) static void scan_use (struct sra_elt *elt, tree *expr_p ATTRIBUTE_UNUSED, block_stmt_iterator *bsi ATTRIBUTE_UNUSED, - bool is_output ATTRIBUTE_UNUSED) + bool is_output ATTRIBUTE_UNUSED, bool use_all ATTRIBUTE_UNUSED) { elt->n_uses += 1; } @@ -1007,6 +1072,9 @@ scan_dump (struct sra_elt *elt) for (c = elt->children; c ; c = c->sibling) scan_dump (c); + + for (c = elt->groups; c ; c = c->sibling) + scan_dump (c); } /* Entry point to phase 2. 
Scan the entire function, building up @@ -1177,10 +1245,19 @@ decide_instantiation_1 (struct sra_elt *elt, unsigned int parent_uses, } else { - struct sra_elt *c; + struct sra_elt *c, *group; unsigned int this_uses = elt->n_uses + parent_uses; unsigned int this_copies = elt->n_copies + parent_copies; + /* Consider groups of sub-elements as weighing in favour of + instantiation whatever their size. */ + for (group = elt->groups; group ; group = group->sibling) + FOR_EACH_ACTUAL_CHILD (c, group) + { + c->n_uses += group->n_uses; + c->n_copies += group->n_copies; + } + for (c = elt->children; c ; c = c->sibling) decide_instantiation_1 (c, this_uses, this_copies); } @@ -1284,6 +1361,10 @@ decide_block_copy (struct sra_elt *elt) struct sra_elt *c; bool any_inst; + /* We shouldn't be invoked on groups of sub-elements as they must + behave like their parent as far as block copy is concerned. */ + gcc_assert (!elt->is_group); + /* If scalarization is disabled, respect it. */ if (elt->cannot_scalarize) { @@ -1302,6 +1383,14 @@ decide_block_copy (struct sra_elt *elt) c->cannot_scalarize = 1; decide_block_copy (c); } + + /* Groups behave like their parent. */ + for (c = elt->groups; c; c = c->sibling) + { + c->cannot_scalarize = 1; + c->use_block_copy = 1; + } + return false; } @@ -1325,7 +1414,7 @@ decide_block_copy (struct sra_elt *elt) else if (host_integerp (size_tree, 1)) { unsigned HOST_WIDE_INT full_size, inst_size = 0; - unsigned int max_size; + unsigned int max_size, max_count, inst_count, full_count; /* If the sra-max-structure-size parameter is 0, then the user has not overridden the parameter and we can choose a @@ -1333,8 +1422,13 @@ decide_block_copy (struct sra_elt *elt) max_size = SRA_MAX_STRUCTURE_SIZE ? SRA_MAX_STRUCTURE_SIZE : MOVE_RATIO * UNITS_PER_WORD; + max_count = SRA_MAX_STRUCTURE_COUNT + ? SRA_MAX_STRUCTURE_COUNT + : MOVE_RATIO; full_size = tree_low_cst (size_tree, 1); + full_count = count_type_elements (elt->type, false); + inst_count = sum_instantiated_sizes (elt, &inst_size); /* ??? What to do here. If there are two fields, and we've only instantiated one, then instantiating the other is clearly a win. @@ -1344,15 +1438,12 @@ decide_block_copy (struct sra_elt *elt) /* If the structure is small, and we've made copies, go ahead and instantiate, hoping that the copies will go away. */ if (full_size <= max_size + && (full_count - inst_count) <= max_count && elt->n_copies > elt->n_uses) use_block_copy = false; - else - { - sum_instantiated_sizes (elt, &inst_size); - - if (inst_size * 100 >= full_size * SRA_FIELD_STRUCTURE_RATIO) - use_block_copy = false; - } + else if (inst_count * 100 >= full_count * SRA_FIELD_STRUCTURE_RATIO + && inst_size * 100 >= full_size * SRA_FIELD_STRUCTURE_RATIO) + use_block_copy = false; /* In order to avoid block copy, we have to be able to instantiate all elements of the type. See if this is possible. */ @@ -1361,8 +1452,13 @@ decide_block_copy (struct sra_elt *elt) || !type_can_instantiate_all_elements (elt->type))) use_block_copy = true; } + elt->use_block_copy = use_block_copy; + /* Groups behave like their parent. 
*/ + for (c = elt->groups; c; c = c->sibling) + c->use_block_copy = use_block_copy; + if (dump_file) { fprintf (dump_file, "Using %s for ", @@ -1424,6 +1520,9 @@ decide_instantiations (void) bitmap_and_compl_into (needs_copy_in, &done_head); } bitmap_clear (&done_head); + + if (!bitmap_empty_p (sra_candidates)) + todoflags |= TODO_update_smt_usage; mark_set_for_renaming (sra_candidates); @@ -1470,6 +1569,24 @@ mark_all_v_defs (tree list) } } +/* Mark every replacement under ELT with TREE_NO_WARNING. */ + +static void +mark_no_warning (struct sra_elt *elt) +{ + if (!elt->all_no_warning) + { + if (elt->replacement) + TREE_NO_WARNING (elt->replacement) = 1; + else + { + struct sra_elt *c; + FOR_EACH_ACTUAL_CHILD (c, elt) + mark_no_warning (c); + } + elt->all_no_warning = true; + } +} /* Build a single level component reference to ELT rooted at BASE. */ @@ -1486,17 +1603,22 @@ generate_one_element_ref (struct sra_elt *elt, tree base) if (DECL_FIELD_CONTEXT (field) != TYPE_MAIN_VARIANT (TREE_TYPE (base))) field = find_compatible_field (TREE_TYPE (base), field); - return build (COMPONENT_REF, elt->type, base, field, NULL); + return build3 (COMPONENT_REF, elt->type, base, field, NULL); } case ARRAY_TYPE: - return build (ARRAY_REF, elt->type, base, elt->element, NULL, NULL); + todoflags |= TODO_update_smt_usage; + if (TREE_CODE (elt->element) == RANGE_EXPR) + return build4 (ARRAY_RANGE_REF, elt->type, base, + TREE_OPERAND (elt->element, 0), NULL, NULL); + else + return build4 (ARRAY_REF, elt->type, base, elt->element, NULL, NULL); case COMPLEX_TYPE: if (elt->element == integer_zero_node) - return build (REALPART_EXPR, elt->type, base); + return build1 (REALPART_EXPR, elt->type, base); else - return build (IMAGPART_EXPR, elt->type, base); + return build1 (IMAGPART_EXPR, elt->type, base); default: gcc_unreachable (); @@ -1536,22 +1658,22 @@ generate_copy_inout (struct sra_elt *elt, bool copy_out, tree expr, c = lookup_element (elt, integer_one_node, NULL, NO_INSERT); i = c->replacement; - t = build (COMPLEX_EXPR, elt->type, r, i); - t = build (MODIFY_EXPR, void_type_node, expr, t); + t = build2 (COMPLEX_EXPR, elt->type, r, i); + t = build2 (MODIFY_EXPR, void_type_node, expr, t); SSA_NAME_DEF_STMT (expr) = t; append_to_statement_list (t, list_p); } else if (elt->replacement) { if (copy_out) - t = build (MODIFY_EXPR, void_type_node, elt->replacement, expr); + t = build2 (MODIFY_EXPR, void_type_node, elt->replacement, expr); else - t = build (MODIFY_EXPR, void_type_node, expr, elt->replacement); + t = build2 (MODIFY_EXPR, void_type_node, expr, elt->replacement); append_to_statement_list (t, list_p); } else { - for (c = elt->children; c ; c = c->sibling) + FOR_EACH_ACTUAL_CHILD (c, elt) { t = generate_one_element_ref (c, unshare_expr (expr)); generate_copy_inout (c, copy_out, t, list_p); @@ -1568,7 +1690,7 @@ generate_element_copy (struct sra_elt *dst, struct sra_elt *src, tree *list_p) { struct sra_elt *dc, *sc; - for (dc = dst->children; dc ; dc = dc->sibling) + FOR_EACH_ACTUAL_CHILD (dc, dst) { sc = lookup_element (src, dc->element, NULL, NO_INSERT); gcc_assert (sc); @@ -1581,8 +1703,8 @@ generate_element_copy (struct sra_elt *dst, struct sra_elt *src, tree *list_p) gcc_assert (src->replacement); - t = build (MODIFY_EXPR, void_type_node, dst->replacement, - src->replacement); + t = build2 (MODIFY_EXPR, void_type_node, dst->replacement, + src->replacement); append_to_statement_list (t, list_p); } } @@ -1603,7 +1725,7 @@ generate_element_zero (struct sra_elt *elt, tree *list_p) return; } - for (c = 
elt->children; c ; c = c->sibling) + FOR_EACH_ACTUAL_CHILD (c, elt) generate_element_zero (c, list_p); if (elt->replacement) @@ -1613,7 +1735,7 @@ generate_element_zero (struct sra_elt *elt, tree *list_p) gcc_assert (elt->is_scalar); t = fold_convert (elt->type, integer_zero_node); - t = build (MODIFY_EXPR, void_type_node, elt->replacement, t); + t = build2 (MODIFY_EXPR, void_type_node, elt->replacement, t); append_to_statement_list (t, list_p); } } @@ -1625,7 +1747,7 @@ static void generate_one_element_init (tree var, tree init, tree *list_p) { /* The replacement can be almost arbitrarily complex. Gimplify. */ - tree stmt = build (MODIFY_EXPR, void_type_node, var, init); + tree stmt = build2 (MODIFY_EXPR, void_type_node, var, init); gimplify_and_add (stmt, list_p); } @@ -1642,6 +1764,8 @@ generate_element_init_1 (struct sra_elt *elt, tree init, tree *list_p) enum tree_code init_code; struct sra_elt *sub; tree t; + unsigned HOST_WIDE_INT idx; + tree value, purpose; /* We can be passed DECL_INITIAL of a static variable. It might have a conversion, which we strip off here. */ @@ -1662,7 +1786,7 @@ generate_element_init_1 (struct sra_elt *elt, tree init, tree *list_p) { case COMPLEX_CST: case COMPLEX_EXPR: - for (sub = elt->children; sub ; sub = sub->sibling) + FOR_EACH_ACTUAL_CHILD (sub, elt) { if (sub->element == integer_zero_node) t = (init_code == COMPLEX_EXPR @@ -1675,11 +1799,8 @@ generate_element_init_1 (struct sra_elt *elt, tree init, tree *list_p) break; case CONSTRUCTOR: - for (t = CONSTRUCTOR_ELTS (init); t ; t = TREE_CHAIN (t)) + FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), idx, purpose, value) { - tree purpose = TREE_PURPOSE (t); - tree value = TREE_VALUE (t); - if (TREE_CODE (purpose) == RANGE_EXPR) { tree lower = TREE_OPERAND (purpose, 0); @@ -1729,16 +1850,9 @@ generate_element_init (struct sra_elt *elt, tree init, tree *list_p) if (ret && *list_p) { tree_stmt_iterator i; - size_t old, new, j; - - old = num_referenced_vars; for (i = tsi_start (*list_p); !tsi_end_p (i); tsi_next (&i)) find_new_referenced_vars (tsi_stmt_ptr (i)); - - new = num_referenced_vars; - for (j = old; j < new; ++j) - mark_sym_for_renaming (referenced_var (j)); } return ret; @@ -1776,7 +1890,7 @@ insert_edge_copies (tree stmt, basic_block bb) /* Helper function to insert LIST before BSI, and set up line number info. */ -static void +void sra_insert_before (block_stmt_iterator *bsi, tree list) { tree stmt = bsi_stmt (*bsi); @@ -1788,7 +1902,7 @@ sra_insert_before (block_stmt_iterator *bsi, tree list) /* Similarly, but insert after BSI. Handles insertion onto edges as well. 
*/ -static void +void sra_insert_after (block_stmt_iterator *bsi, tree list) { tree stmt = bsi_stmt (*bsi); @@ -1808,7 +1922,7 @@ static void sra_replace (block_stmt_iterator *bsi, tree list) { sra_insert_before (bsi, list); - bsi_remove (bsi); + bsi_remove (bsi, false); if (bsi_end_p (*bsi)) *bsi = bsi_last (bsi->bb); else @@ -1821,7 +1935,7 @@ sra_replace (block_stmt_iterator *bsi, tree list) static void scalarize_use (struct sra_elt *elt, tree *expr_p, block_stmt_iterator *bsi, - bool is_output) + bool is_output, bool use_all) { tree list = NULL, stmt = bsi_stmt (*bsi); @@ -1856,7 +1970,11 @@ scalarize_use (struct sra_elt *elt, tree *expr_p, block_stmt_iterator *bsi, if (is_output) sra_insert_after (bsi, list); else - sra_insert_before (bsi, list); + { + sra_insert_before (bsi, list); + if (use_all) + mark_no_warning (elt); + } } } @@ -2021,7 +2139,7 @@ scalarize_ldst (struct sra_elt *elt, tree other, { /* Since ELT is not fully instantiated, we have to leave the block copy in place. Treat this as a USE. */ - scalarize_use (elt, NULL, bsi, is_output); + scalarize_use (elt, NULL, bsi, is_output, false); } else { @@ -2130,6 +2248,10 @@ dump_sra_elt_name (FILE *f, struct sra_elt *elt) fputc ('.', f); print_generic_expr (f, elt->element, dump_flags); } + else if (TREE_CODE (elt->element) == RANGE_EXPR) + fprintf (f, "["HOST_WIDE_INT_PRINT_DEC".."HOST_WIDE_INT_PRINT_DEC"]", + TREE_INT_CST_LOW (TREE_OPERAND (elt->element, 0)), + TREE_INT_CST_LOW (TREE_OPERAND (elt->element, 1))); else fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC "]", TREE_INT_CST_LOW (elt->element)); @@ -2145,17 +2267,27 @@ debug_sra_elt_name (struct sra_elt *elt) fputc ('\n', stderr); } +void +sra_init_cache (void) +{ + if (sra_type_decomp_cache) + return; + + sra_type_decomp_cache = BITMAP_ALLOC (NULL); + sra_type_inst_cache = BITMAP_ALLOC (NULL); +} + /* Main entry point. */ -static void +static unsigned int tree_sra (void) { /* Initialize local variables. */ + todoflags = 0; gcc_obstack_init (&sra_obstack); sra_candidates = BITMAP_ALLOC (NULL); needs_copy_in = BITMAP_ALLOC (NULL); - sra_type_decomp_cache = BITMAP_ALLOC (NULL); - sra_type_inst_cache = BITMAP_ALLOC (NULL); + sra_init_cache (); sra_map = htab_create (101, sra_elt_hash, sra_elt_eq, NULL); /* Scan. If we find anything, instantiate and scalarize. */ @@ -2174,6 +2306,7 @@ tree_sra (void) BITMAP_FREE (sra_type_decomp_cache); BITMAP_FREE (sra_type_inst_cache); obstack_free (&sra_obstack, NULL); + return todoflags; } static bool @@ -2193,9 +2326,10 @@ struct tree_opt_pass pass_sra = TV_TREE_SRA, /* tv_id */ PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */ 0, /* properties_provided */ - 0, /* properties_destroyed */ + PROP_smt_usage, /* properties_destroyed */ 0, /* todo_flags_start */ - TODO_dump_func | TODO_update_ssa - | TODO_ggc_collect | TODO_verify_ssa, /* todo_flags_finish */ + TODO_dump_func /* todo_flags_finish */ + | TODO_update_ssa + | TODO_ggc_collect | TODO_verify_ssa, 0 /* letter */ };
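
Two notes for readers of the patch, each with a standalone C sketch. The
sketches use simplified stand-in types and names (plain ints in place of
INTEGER_CST and RANGE_EXPR trees); they model the logic of the new code
but are not GCC code.

First, the grouping scheme: a RANGE_EXPR group element owns no children
of its own. FOR_EACH_ACTUAL_CHILD walks the parent's child list and
filters it by the group's index range, exactly as next_child_for_group
does in the first hunk. A minimal model, assuming integer child indices:

#include <stdio.h>

struct elt
{
  int index;            /* stand-in for the INTEGER_CST element */
  struct elt *sibling;  /* next child of the same parent */
};

struct group
{
  int lo, hi;           /* stand-in for the RANGE_EXPR bounds */
  struct elt *children; /* the parent's child list */
};

/* Analogue of next_child_for_group: advance CHILD to the parent's
   next child whose index lies within [G->lo, G->hi].  */

static struct elt *
next_child_for_group (struct elt *child, struct group *g)
{
  child = child ? child->sibling : g->children;
  while (child && (child->index < g->lo || child->index > g->hi))
    child = child->sibling;
  return child;
}

int
main (void)
{
  struct elt c2 = { 7, NULL };
  struct elt c1 = { 3, &c2 };
  struct elt c0 = { 0, &c1 };
  struct group g = { 1, 7, &c0 };
  struct elt *c;

  /* Analogue of FOR_EACH_ACTUAL_CHILD when ELT is a group.  */
  for (c = next_child_for_group (NULL, &g); c;
       c = next_child_for_group (c, &g))
    printf ("child [%d]\n", c->index);  /* prints 3 and 7, skips 0 */

  return 0;
}

Iteration starts from a null child and simply runs off the end of the
sibling list, so group members need not be contiguous among the parent's
children.

Second, the revised decide_block_copy heuristic now weighs element
counts as well as byte sizes. Its core test can be read as the predicate
below; the parameter names mirror the hunk (with the
SRA_MAX_STRUCTURE_SIZE/COUNT and MOVE_RATIO defaults folded into
max_size and max_count), but the function is an illustrative model that
omits the later "can we instantiate every element?" override:

/* Return nonzero if the aggregate should be copied as a block:
   scalarize only when it is small in both bytes and uninstantiated
   element count and copies dominate uses, or when the instantiated
   fraction clears SRA_FIELD_STRUCTURE_RATIO by both count and size.  */

static int
use_block_copy_p (unsigned full_size, unsigned inst_size,
                  unsigned full_count, unsigned inst_count,
                  unsigned n_uses, unsigned n_copies,
                  unsigned max_size, unsigned max_count, unsigned ratio)
{
  if (full_size <= max_size
      && full_count - inst_count <= max_count
      && n_copies > n_uses)
    return 0;

  if (inst_count * 100 >= full_count * ratio
      && inst_size * 100 >= full_size * ratio)
    return 0;

  return 1;
}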