/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "tree.h"

/* These RTL headers are needed for basic-block.h.  */
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "langhooks.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "tree-gimple.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "flags.h"
#include "bitmap.h"
#include "obstack.h"
#include "target.h"
/* expr.h is needed for MOVE_RATIO.  */
#include "expr.h"
#include "params.h"
/* The objective of this pass is to replace a non-addressable aggregate with a
   set of independent variables.  Most of the time, all of these variables
   will be scalars.  But a secondary objective is to break up larger
   aggregates into smaller aggregates.  In the process we may find that some
   bits of the larger aggregate can be deleted as unreferenced.

   This substitution is done globally.  More localized substitutions would
   be the purview of a load-store motion pass.

   The optimization proceeds in phases:

     (1) Identify variables that have types that are candidates for
	 decomposition.

     (2) Scan the function looking for the ways these variables are used.
	 In particular we're interested in the number of times a variable
	 (or member) is needed as a complete unit, and the number of times
	 a variable (or member) is copied.

     (3) Based on the usage profile, instantiate substitution variables.

     (4) Scan the function making replacements.  */
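
/* As a hypothetical example of the transformation, with illustrative
   variable names that do not come from any particular testcase: given

	struct point { int x; int y; } p;
	p.x = 3;
	p.y = 4;
	return p.x + p.y;

   the members of P are referenced independently far more often than P
   is needed as a complete unit, so phase 3 creates scalar replacements
   and phase 4 rewrites the references, conceptually yielding

	int p$x, p$y;
	p$x = 3;
	p$y = 4;
	return p$x + p$y;

   after which P itself is unreferenced and can be deleted.  */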
/* The set of aggregate variables that are candidates for scalarization.  */
static bitmap sra_candidates;

/* Set of scalarizable PARM_DECLs that need copy-in operations at the
   beginning of the function.  */
static bitmap needs_copy_in;

/* Sets of bit pairs that cache type decomposition and instantiation.  */
static bitmap sra_type_decomp_cache;
static bitmap sra_type_inst_cache;
/* One of these structures is created for each candidate aggregate
   and each (accessed) member of such an aggregate.  */
struct sra_elt
{
  /* A tree of the elements.  Used when we want to traverse everything.  */
  struct sra_elt *parent;
  struct sra_elt *children;
  struct sra_elt *sibling;

  /* If this element is a root, then this is the VAR_DECL.  If this is
     a sub-element, this is some token used to identify the reference.
     In the case of COMPONENT_REF, this is the FIELD_DECL.  In the case
     of an ARRAY_REF, this is the (constant) index.  In the case of a
     complex number, this is a zero or one.  */
  tree element;

  /* The type of the element.  */
  tree type;

  /* A VAR_DECL, for any sub-element we've decided to replace.  */
  tree replacement;

  /* The number of times the element is referenced as a whole.  I.e.
     given "a.b.c", this would be incremented for C, but not for A or B.  */
  unsigned int n_uses;

  /* The number of times the element is copied to or from another
     scalarizable element.  */
  unsigned int n_copies;

  /* True if TYPE is scalar.  */
  bool is_scalar;

  /* True if we saw something about this element that prevents scalarization,
     such as non-constant indexing.  */
  bool cannot_scalarize;

  /* True if we've decided that structure-to-structure assignment
     should happen via memcpy and not per-element.  */
  bool use_block_copy;

  /* A flag for use with/after random access traversals.  */
  bool visited;
};

/* Random access to the child of a parent is performed by hashing.
   This prevents quadratic behavior, and allows SRA to function
   reasonably on larger records.  */
static htab_t sra_map;

/* All structures are allocated out of the following obstack.  */
static struct obstack sra_obstack;
/* Debugging functions.  */
static void dump_sra_elt_name (FILE *, struct sra_elt *);
extern void debug_sra_elt_name (struct sra_elt *);

/* Forward declarations.  */
static tree generate_element_ref (struct sra_elt *);
/* Return true if DECL is an SRA candidate.  */

static bool
is_sra_candidate_decl (tree decl)
{
  return DECL_P (decl) && bitmap_bit_p (sra_candidates, DECL_UID (decl));
}

/* Return true if TYPE is a scalar type.  */

static bool
is_sra_scalar_type (tree type)
{
  enum tree_code code = TREE_CODE (type);
  return (code == INTEGER_TYPE || code == REAL_TYPE || code == VECTOR_TYPE
	  || code == ENUMERAL_TYPE || code == BOOLEAN_TYPE
	  || code == CHAR_TYPE || code == POINTER_TYPE || code == OFFSET_TYPE
	  || code == REFERENCE_TYPE);
}
/* Return true if TYPE can be decomposed into a set of independent variables.

   Note that this doesn't imply that all elements of TYPE can be
   instantiated, just that if we decide to break up the type into
   separate pieces that it can be done.  */

static bool
sra_type_can_be_decomposed_p (tree type)
{
  unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
  tree t;

  /* Avoid searching the same type twice.  */
  if (bitmap_bit_p (sra_type_decomp_cache, cache+0))
    return true;
  if (bitmap_bit_p (sra_type_decomp_cache, cache+1))
    return false;

  /* The type must have a definite nonzero size.  */
  if (TYPE_SIZE (type) == NULL || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
      || integer_zerop (TYPE_SIZE (type)))
    goto fail;

  /* The type must be a non-union aggregate.  */
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	bool saw_one_field = false;

	for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
	  if (TREE_CODE (t) == FIELD_DECL)
	    {
	      /* Reject incorrectly represented bit fields.  */
	      if (DECL_BIT_FIELD (t)
		  && (tree_low_cst (DECL_SIZE (t), 1)
		      != TYPE_PRECISION (TREE_TYPE (t))))
		goto fail;

	      saw_one_field = true;
	    }

	/* Record types must have at least one field.  */
	if (!saw_one_field)
	  goto fail;
      }
      break;

    case ARRAY_TYPE:
      /* Array types must have a fixed lower and upper bound.  */
      t = TYPE_DOMAIN (type);
      if (t == NULL)
	goto fail;
      if (TYPE_MIN_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MIN_VALUE (t)))
	goto fail;
      if (TYPE_MAX_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MAX_VALUE (t)))
	goto fail;
      break;

    case COMPLEX_TYPE:
      break;

    default:
      goto fail;
    }

  bitmap_set_bit (sra_type_decomp_cache, cache+0);
  return true;

 fail:
  bitmap_set_bit (sra_type_decomp_cache, cache+1);
  return false;
}
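
/* For illustration, under the rules above a type such as

	struct s { int i; double d; };		decomposable
	int a[4];				decomposable (fixed bounds)
	_Complex double				decomposable

   is accepted, while unions, zero-sized or variable-sized types, arrays
   without constant bounds, and records containing incorrectly
   represented bit fields are all rejected and cached as
   non-decomposable.  */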
/* Return true if DECL can be decomposed into a set of independent
   (though not necessarily scalar) variables.  */

static bool
decl_can_be_decomposed_p (tree var)
{
  /* Early out for scalars.  */
  if (is_sra_scalar_type (TREE_TYPE (var)))
    return false;

  /* The variable must not be aliased.  */
  if (!is_gimple_non_addressable (var))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because it must live in memory\n");
	}
      return false;
    }

  /* The variable must not be volatile.  */
  if (TREE_THIS_VOLATILE (var))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because it is declared volatile\n");
	}
      return false;
    }

  /* We must be able to decompose the variable's type.  */
  if (!sra_type_can_be_decomposed_p (TREE_TYPE (var)))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because its type cannot be decomposed\n");
	}
      return false;
    }

  return true;
}
/* Return true if TYPE can be *completely* decomposed into scalars.  */

static bool
type_can_instantiate_all_elements (tree type)
{
  if (is_sra_scalar_type (type))
    return true;
  if (!sra_type_can_be_decomposed_p (type))
    return false;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
	tree f;

	if (bitmap_bit_p (sra_type_inst_cache, cache+0))
	  return true;
	if (bitmap_bit_p (sra_type_inst_cache, cache+1))
	  return false;

	for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      if (!type_can_instantiate_all_elements (TREE_TYPE (f)))
		{
		  bitmap_set_bit (sra_type_inst_cache, cache+1);
		  return false;
		}
	    }

	bitmap_set_bit (sra_type_inst_cache, cache+0);
	return true;
      }

    case ARRAY_TYPE:
      return type_can_instantiate_all_elements (TREE_TYPE (type));

    case COMPLEX_TYPE:
      return true;

    default:
      gcc_unreachable ();
    }
}
/* Test whether ELT or some sub-element cannot be scalarized.  */

static bool
can_completely_scalarize_p (struct sra_elt *elt)
{
  struct sra_elt *c;

  if (elt->cannot_scalarize)
    return false;

  for (c = elt->children; c ; c = c->sibling)
    if (!can_completely_scalarize_p (c))
      return false;

  return true;
}
/* A simplified tree hashing algorithm that only handles the types of
   trees we expect to find in sra_elt->element.  */

static hashval_t
sra_hash_tree (tree t)
{
  hashval_t h;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      h = DECL_UID (t);
      break;

    case INTEGER_CST:
      h = TREE_INT_CST_LOW (t) ^ TREE_INT_CST_HIGH (t);
      break;

    case FIELD_DECL:
      /* We can have types that are compatible, but have different member
	 lists, so we can't hash fields by ID.  Use offsets instead.  */
      h = iterative_hash_expr (DECL_FIELD_OFFSET (t), 0);
      h = iterative_hash_expr (DECL_FIELD_BIT_OFFSET (t), h);
      break;

    default:
      gcc_unreachable ();
    }

  return h;
}
/* Hash function for type SRA_PAIR.  */

static hashval_t
sra_elt_hash (const void *x)
{
  const struct sra_elt *e = x;
  const struct sra_elt *p;
  hashval_t h;

  h = sra_hash_tree (e->element);

  /* Take into account everything back up the chain.  Given that chain
     lengths are rarely very long, this should be acceptable.  If we
     truly identify this as a performance problem, it should work to
     hash the pointer value "e->parent".  */
  for (p = e->parent; p ; p = p->parent)
    h = (h * 65521) ^ sra_hash_tree (p->element);

  return h;
}
/* Equality function for type SRA_PAIR.  */

static int
sra_elt_eq (const void *x, const void *y)
{
  const struct sra_elt *a = x;
  const struct sra_elt *b = y;
  tree ae, be;

  if (a->parent != b->parent)
    return false;

  ae = a->element;
  be = b->element;

  if (ae == be)
    return true;
  if (TREE_CODE (ae) != TREE_CODE (be))
    return false;

  switch (TREE_CODE (ae))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* These are all pointer unique.  */
      return false;

    case INTEGER_CST:
      /* Integers are not pointer unique, so compare their values.  */
      return tree_int_cst_equal (ae, be);

    case FIELD_DECL:
      /* Fields are unique within a record, but not between
	 compatible records.  */
      if (DECL_FIELD_CONTEXT (ae) == DECL_FIELD_CONTEXT (be))
	return false;
      return fields_compatible_p (ae, be);

    default:
      gcc_unreachable ();
    }
}
/* Create or return the SRA_ELT structure for CHILD in PARENT.  PARENT
   may be null, in which case CHILD must be a DECL.  */

static struct sra_elt *
lookup_element (struct sra_elt *parent, tree child, tree type,
		enum insert_option insert)
{
  struct sra_elt dummy;
  struct sra_elt **slot;
  struct sra_elt *elt;

  dummy.parent = parent;
  dummy.element = child;

  slot = (struct sra_elt **) htab_find_slot (sra_map, &dummy, insert);
  if (!slot && insert == NO_INSERT)
    return NULL;

  elt = *slot;
  if (!elt && insert == INSERT)
    {
      *slot = elt = obstack_alloc (&sra_obstack, sizeof (*elt));
      memset (elt, 0, sizeof (*elt));

      elt->parent = parent;
      elt->element = child;
      elt->type = type;
      elt->is_scalar = is_sra_scalar_type (type);

      if (parent)
	{
	  elt->sibling = parent->children;
	  parent->children = elt;
	}

      /* If this is a parameter, then if we want to scalarize, we have
	 one copy from the true function parameter.  Count it now.  */
      if (TREE_CODE (child) == PARM_DECL)
	{
	  elt->n_copies = 1;
	  bitmap_set_bit (needs_copy_in, DECL_UID (child));
	}
    }

  return elt;
}
/* Return true if the ARRAY_REF in EXPR is a constant, in bounds access.  */

static bool
is_valid_const_index (tree expr)
{
  tree dom, t, index = TREE_OPERAND (expr, 1);

  if (TREE_CODE (index) != INTEGER_CST)
    return false;

  /* Watch out for stupid user tricks, indexing outside the array.

     Careful, we're not called only on scalarizable types, so do not
     assume constant array bounds.  We needn't do anything with such
     cases, since they'll be referring to objects that we should have
     already rejected for scalarization, so returning false is fine.  */

  dom = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (expr, 0)));
  if (dom == NULL)
    return false;

  t = TYPE_MIN_VALUE (dom);
  if (!t || TREE_CODE (t) != INTEGER_CST)
    return false;
  if (tree_int_cst_lt (index, t))
    return false;

  t = TYPE_MAX_VALUE (dom);
  if (!t || TREE_CODE (t) != INTEGER_CST)
    return false;
  if (tree_int_cst_lt (t, index))
    return false;

  return true;
}
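
/* For example (illustrative): given "int a[4];" with domain [0,3], the
   reference a[2] passes the checks above, while a[i] fails the
   INTEGER_CST test and a[7] fails the comparison against
   TYPE_MAX_VALUE.  */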
/* Create or return the SRA_ELT structure for EXPR if the expression
   refers to a scalarizable variable.  */

static struct sra_elt *
maybe_lookup_element_for_expr (tree expr)
{
  struct sra_elt *elt;
  tree child;

  switch (TREE_CODE (expr))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      if (is_sra_candidate_decl (expr))
	return lookup_element (NULL, expr, TREE_TYPE (expr), INSERT);
      return NULL;

    case ARRAY_REF:
      /* We can't scalarize variable array indices.  */
      if (is_valid_const_index (expr))
	child = TREE_OPERAND (expr, 1);
      else
	return NULL;
      break;

    case COMPONENT_REF:
      /* Don't look through unions.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) != RECORD_TYPE)
	return NULL;
      child = TREE_OPERAND (expr, 1);
      break;

    case REALPART_EXPR:
      child = integer_zero_node;
      break;
    case IMAGPART_EXPR:
      child = integer_one_node;
      break;

    default:
      return NULL;
    }

  elt = maybe_lookup_element_for_expr (TREE_OPERAND (expr, 0));
  if (elt)
    return lookup_element (elt, child, TREE_TYPE (expr), INSERT);
  else
    return NULL;
}
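
/* So for a reference such as x.f[2] (names illustrative), this builds
   the element chain x -> f -> 2, provided X is a candidate, F is not a
   union member, and the index is constant and in bounds.  */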
/* Functions to walk just enough of the tree to see all scalarizable
   references, and categorize them.  */

/* A set of callbacks for phases 2 and 4.  They'll be invoked for the
   various kinds of references seen.  In all cases, *BSI is an iterator
   pointing to the statement being processed.  */
struct sra_walk_fns
{
  /* Invoked when ELT is required as a unit.  Note that ELT might refer to
     a leaf node, in which case this is a simple scalar reference.  *EXPR_P
     points to the location of the expression.  IS_OUTPUT is true if this
     is a left-hand-side reference.  */
  void (*use) (struct sra_elt *elt, tree *expr_p,
	       block_stmt_iterator *bsi, bool is_output);

  /* Invoked when we have a copy between two scalarizable references.  */
  void (*copy) (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
		block_stmt_iterator *bsi);

  /* Invoked when ELT is initialized from a constant.  VALUE may be NULL,
     in which case it should be treated as an empty CONSTRUCTOR.  */
  void (*init) (struct sra_elt *elt, tree value, block_stmt_iterator *bsi);

  /* Invoked when we have a copy between one scalarizable reference ELT
     and one non-scalarizable reference OTHER.  IS_OUTPUT is true if ELT
     is on the left-hand side.  */
  void (*ldst) (struct sra_elt *elt, tree other,
		block_stmt_iterator *bsi, bool is_output);

  /* True during phase 2, false during phase 4.  */
  /* ??? This is a hack.  */
  bool initial_scan;
};
#ifdef ENABLE_CHECKING
/* Invoked via walk_tree, if *TP contains a candidate decl, return it.  */

static tree
sra_find_candidate_decl (tree *tp, int *walk_subtrees,
			 void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
    {
      *walk_subtrees = 0;
      if (is_sra_candidate_decl (t))
	return t;
    }
  else if (TYPE_P (t))
    *walk_subtrees = 0;

  return NULL;
}
#endif
/* Walk most expressions looking for a scalarizable aggregate.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_expr (tree *expr_p, block_stmt_iterator *bsi, bool is_output,
	       const struct sra_walk_fns *fns)
{
  tree expr = *expr_p;
  tree inner = expr;
  bool disable_scalarization = false;

  /* We're looking to collect a reference expression between EXPR and INNER,
     such that INNER is a scalarizable decl and all other nodes through EXPR
     are references that we can scalarize.  If we come across something that
     we can't scalarize, we reset EXPR.  This has the effect of making it
     appear that we're referring to the larger expression as a whole.  */

  while (1)
    switch (TREE_CODE (inner))
      {
      case VAR_DECL:
      case PARM_DECL:
      case RESULT_DECL:
	/* If there is a scalarizable decl at the bottom, then process it.  */
	if (is_sra_candidate_decl (inner))
	  {
	    struct sra_elt *elt = maybe_lookup_element_for_expr (expr);
	    if (disable_scalarization)
	      elt->cannot_scalarize = true;
	    else
	      fns->use (elt, expr_p, bsi, is_output);
	  }
	return;

      case ARRAY_REF:
	/* Non-constant index means any member may be accessed.  Prevent the
	   expression from being scalarized.  If we were to treat this as a
	   reference to the whole array, we can wind up with a single dynamic
	   index reference inside a loop being overridden by several constant
	   index references during loop setup.  It's possible that this could
	   be avoided by using dynamic usage counts based on BB trip counts
	   (based on loop analysis or profiling), but that hardly seems worth
	   the effort.  */
	/* ??? Hack.  Figure out how to push this into the scan routines
	   without duplicating too much code.  */
	if (!is_valid_const_index (inner))
	  {
	    disable_scalarization = true;
	    goto use_all;
	  }
	/* ??? Are we assured that non-constant bounds and stride will have
	   the same value everywhere?  I don't think Fortran will...  */
	if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
	  goto use_all;
	inner = TREE_OPERAND (inner, 0);
	break;

      case COMPONENT_REF:
	/* A reference to a union member constitutes a reference to the
	   entire union.  */
	if (TREE_CODE (TREE_TYPE (TREE_OPERAND (inner, 0))) != RECORD_TYPE)
	  goto use_all;
	/* ??? See above re non-constant stride.  */
	if (TREE_OPERAND (inner, 2))
	  goto use_all;
	inner = TREE_OPERAND (inner, 0);
	break;

      case REALPART_EXPR:
      case IMAGPART_EXPR:
	inner = TREE_OPERAND (inner, 0);
	break;

      case BIT_FIELD_REF:
	/* A bit field reference (access to *multiple* fields simultaneously)
	   is not currently scalarized.  Consider this an access to the
	   complete outer element, to which walk_tree will bring us next.  */
	goto use_all;

      case ARRAY_RANGE_REF:
	/* Similarly, a subrange reference is used to modify indexing.  Which
	   means that the canonical element names that we have won't work.  */
	goto use_all;

      case VIEW_CONVERT_EXPR:
      case NOP_EXPR:
	/* Similarly, a view/nop explicitly wants to look at an object in a
	   type other than the one we've scalarized.  */
	goto use_all;

      case WITH_SIZE_EXPR:
	/* This is a transparent wrapper.  The entire inner expression really
	   is being used.  */
	goto use_all;

      use_all:
	expr_p = &TREE_OPERAND (inner, 0);
	inner = expr = *expr_p;
	break;

      default:
#ifdef ENABLE_CHECKING
	/* Validate that we're not missing any references.  */
	gcc_assert (!walk_tree (&inner, sra_find_candidate_decl, NULL, NULL));
#endif
	return;
      }
}
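
/* For instance (illustrative), walking the reference a[i].f lands in
   the ARRAY_REF case with a non-constant index: DISABLE_SCALARIZATION
   is set, the walk restarts at the inner operand via use_all, and once
   the decl A is reached its element is marked CANNOT_SCALARIZE rather
   than being reported as a use.  */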
/* Walk a TREE_LIST of values looking for scalarizable aggregates.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_tree_list (tree list, block_stmt_iterator *bsi, bool is_output,
		    const struct sra_walk_fns *fns)
{
  tree op;
  for (op = list; op ; op = TREE_CHAIN (op))
    sra_walk_expr (&TREE_VALUE (op), bsi, is_output, fns);
}

/* Walk the arguments of a CALL_EXPR looking for scalarizable aggregates.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_call_expr (tree expr, block_stmt_iterator *bsi,
		    const struct sra_walk_fns *fns)
{
  sra_walk_tree_list (TREE_OPERAND (expr, 1), bsi, false, fns);
}

/* Walk the inputs and outputs of an ASM_EXPR looking for scalarizable
   aggregates.  If we find one, invoke FNS->USE.  */

static void
sra_walk_asm_expr (tree expr, block_stmt_iterator *bsi,
		   const struct sra_walk_fns *fns)
{
  sra_walk_tree_list (ASM_INPUTS (expr), bsi, false, fns);
  sra_walk_tree_list (ASM_OUTPUTS (expr), bsi, true, fns);
}
/* Walk a MODIFY_EXPR and categorize the assignment appropriately.  */

static void
sra_walk_modify_expr (tree expr, block_stmt_iterator *bsi,
		      const struct sra_walk_fns *fns)
{
  struct sra_elt *lhs_elt, *rhs_elt;
  tree lhs, rhs;

  lhs = TREE_OPERAND (expr, 0);
  rhs = TREE_OPERAND (expr, 1);
  lhs_elt = maybe_lookup_element_for_expr (lhs);
  rhs_elt = maybe_lookup_element_for_expr (rhs);

  /* If both sides are scalarizable, this is a COPY operation.  */
  if (lhs_elt && rhs_elt)
    {
      fns->copy (lhs_elt, rhs_elt, bsi);
      return;
    }

  /* If the RHS is scalarizable, handle it.  There are only two cases.  */
  if (rhs_elt)
    {
      if (!rhs_elt->is_scalar)
	fns->ldst (rhs_elt, lhs, bsi, false);
      else
	fns->use (rhs_elt, &TREE_OPERAND (expr, 1), bsi, false);
    }

  /* If it isn't scalarizable, there may be scalarizable variables within, so
     check for a call or else walk the RHS to see if we need to do any
     copy-in operations.  We need to do it before the LHS is scalarized so
     that the statements get inserted in the proper place, before any
     copy-out operations.  */
  else
    {
      tree call = get_call_expr_in (rhs);
      if (call)
	sra_walk_call_expr (call, bsi, fns);
      else
	sra_walk_expr (&TREE_OPERAND (expr, 1), bsi, false, fns);
    }

  /* Likewise, handle the LHS being scalarizable.  We have cases similar
     to those above, but also want to handle RHS being constant.  */
  if (lhs_elt)
    {
      /* If this is an assignment from a constant, or constructor, then
	 we have access to all of the elements individually.  Invoke INIT.  */
      if (TREE_CODE (rhs) == COMPLEX_EXPR
	  || TREE_CODE (rhs) == COMPLEX_CST
	  || TREE_CODE (rhs) == CONSTRUCTOR)
	fns->init (lhs_elt, rhs, bsi);

      /* If this is an assignment from read-only memory, treat this as if
	 we'd been passed the constructor directly.  Invoke INIT.  */
      else if (TREE_CODE (rhs) == VAR_DECL
	       && TREE_STATIC (rhs)
	       && TREE_READONLY (rhs)
	       && targetm.binds_local_p (rhs))
	fns->init (lhs_elt, DECL_INITIAL (rhs), bsi);

      /* If this is a copy from a non-scalarizable lvalue, invoke LDST.
	 The lvalue requirement prevents us from trying to directly scalarize
	 the result of a function call.  Which would result in trying to call
	 the function multiple times, and other evil things.  */
      else if (!lhs_elt->is_scalar && is_gimple_addressable (rhs))
	fns->ldst (lhs_elt, rhs, bsi, true);

      /* Otherwise we're being used in some context that requires the
	 aggregate to be seen as a whole.  Invoke USE.  */
      else
	fns->use (lhs_elt, &TREE_OPERAND (expr, 0), bsi, true);
    }

  /* Similarly to above, LHS_ELT being null only means that the LHS as a
     whole is not a scalarizable reference.  There may be occurrences of
     scalarizable variables within, which implies a USE.  */
  else
    sra_walk_expr (&TREE_OPERAND (expr, 0), bsi, true, fns);
}
/* Entry point to the walk functions.  Search the entire function,
   invoking the callbacks in FNS on each of the references to
   scalarizable variables.  */

static void
sra_walk_function (const struct sra_walk_fns *fns)
{
  basic_block bb;
  block_stmt_iterator si, ni;

  /* ??? Phase 4 could derive some benefit to walking the function in
     dominator tree order.  */

  FOR_EACH_BB (bb)
    for (si = bsi_start (bb); !bsi_end_p (si); si = ni)
      {
	tree stmt, t;
	stmt_ann_t ann;

	stmt = bsi_stmt (si);
	ann = stmt_ann (stmt);

	ni = si;
	bsi_next (&ni);

	/* If the statement has no virtual operands, then it doesn't
	   make any structure references that we care about.  */
	if (ZERO_SSA_OPERANDS (stmt, (SSA_OP_VIRTUAL_DEFS | SSA_OP_VUSE)))
	  continue;

	switch (TREE_CODE (stmt))
	  {
	  case RETURN_EXPR:
	    /* If we have "return <retval>" then the return value is
	       already exposed for our pleasure.  Walk it as a USE to
	       force all the components back in place for the return.

	       If we have an embedded assignment, then <retval> is of
	       a type that gets returned in registers in this ABI, and
	       we do not wish to extend their lifetimes.  Treat this
	       as a USE of the variable on the RHS of this assignment.  */

	    t = TREE_OPERAND (stmt, 0);
	    if (TREE_CODE (t) == MODIFY_EXPR)
	      sra_walk_expr (&TREE_OPERAND (t, 1), &si, false, fns);
	    else
	      sra_walk_expr (&TREE_OPERAND (stmt, 0), &si, false, fns);
	    break;

	  case MODIFY_EXPR:
	    sra_walk_modify_expr (stmt, &si, fns);
	    break;
	  case CALL_EXPR:
	    sra_walk_call_expr (stmt, &si, fns);
	    break;
	  case ASM_EXPR:
	    sra_walk_asm_expr (stmt, &si, fns);
	    break;

	  default:
	    break;
	  }
      }
}
/* Phase One: Scan all referenced variables in the program looking for
   structures that could be decomposed.  */

static bool
find_candidates_for_sra (void)
{
  bool any_set = false;
  tree var;
  referenced_var_iterator rvi;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (decl_can_be_decomposed_p (var))
	{
	  bitmap_set_bit (sra_candidates, DECL_UID (var));
	  any_set = true;
	}
    }

  return any_set;
}
/* Phase Two: Scan all references to scalarizable variables.  Count the
   number of times they are used or copied respectively.  */

/* Callbacks to fill in SRA_WALK_FNS.  Everything but USE is
   considered a copy, because we can decompose the reference such that
   the sub-elements needn't be contiguous.  */

static void
scan_use (struct sra_elt *elt, tree *expr_p ATTRIBUTE_UNUSED,
	  block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
	  bool is_output ATTRIBUTE_UNUSED)
{
  elt->n_uses += 1;
}

static void
scan_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
	   block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
{
  lhs_elt->n_copies += 1;
  rhs_elt->n_copies += 1;
}

static void
scan_init (struct sra_elt *lhs_elt, tree rhs ATTRIBUTE_UNUSED,
	   block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
{
  lhs_elt->n_copies += 1;
}

static void
scan_ldst (struct sra_elt *elt, tree other ATTRIBUTE_UNUSED,
	   block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
	   bool is_output ATTRIBUTE_UNUSED)
{
  elt->n_copies += 1;
}
/* Dump the values we collected during the scanning phase.  */

static void
scan_dump (struct sra_elt *elt)
{
  struct sra_elt *c;

  dump_sra_elt_name (dump_file, elt);
  fprintf (dump_file, ": n_uses=%u n_copies=%u\n", elt->n_uses, elt->n_copies);

  for (c = elt->children; c ; c = c->sibling)
    scan_dump (c);
}
/* Entry point to phase 2.  Scan the entire function, building up
   scalarization data structures, recording copies and uses.  */

static void
scan_function (void)
{
  static const struct sra_walk_fns fns = {
    scan_use, scan_copy, scan_init, scan_ldst, true
  };

  sra_walk_function (&fns);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      unsigned i;
      bitmap_iterator bi;

      fputs ("\nScan results:\n", dump_file);
      EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
	{
	  tree var = referenced_var (i);
	  struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
	  if (elt)
	    scan_dump (elt);
	}
      fputc ('\n', dump_file);
    }
}
/* Phase Three: Make decisions about which variables to scalarize, if any.
   All elements to be scalarized have replacement variables made for them.  */

/* A subroutine of build_element_name.  Recursively build the element
   name on the obstack.  */

static void
build_element_name_1 (struct sra_elt *elt)
{
  tree t;
  char buffer[32];

  if (elt->parent)
    {
      build_element_name_1 (elt->parent);
      obstack_1grow (&sra_obstack, '$');

      if (TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
	{
	  if (elt->element == integer_zero_node)
	    obstack_grow (&sra_obstack, "real", 4);
	  else
	    obstack_grow (&sra_obstack, "imag", 4);
	  return;
	}
    }

  t = elt->element;
  if (TREE_CODE (t) == INTEGER_CST)
    {
      /* ??? Eh.  Don't bother doing double-wide printing.  */
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (t));
      obstack_grow (&sra_obstack, buffer, strlen (buffer));
    }
  else
    {
      tree name = DECL_NAME (t);
      if (name)
	obstack_grow (&sra_obstack, IDENTIFIER_POINTER (name),
		      IDENTIFIER_LENGTH (name));
      else
	{
	  sprintf (buffer, "D%u", DECL_UID (t));
	  obstack_grow (&sra_obstack, buffer, strlen (buffer));
	}
    }
}
/* Construct a pretty variable name for an element's replacement variable.
   The name is built on the obstack.  */

static char *
build_element_name (struct sra_elt *elt)
{
  build_element_name_1 (elt);
  obstack_1grow (&sra_obstack, '\0');
  return XOBFINISH (&sra_obstack, char *);
}
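
/* For example (illustrative names): the element for C in "a.b.c" is
   named "a$b$c", an array element a[3] becomes "a$3", and the two
   halves of a complex variable z become "z$real" and "z$imag".  */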
/* Instantiate an element as an independent variable.  */

static void
instantiate_element (struct sra_elt *elt)
{
  struct sra_elt *base_elt;
  tree var, base;

  for (base_elt = elt; base_elt->parent; base_elt = base_elt->parent)
    continue;
  base = base_elt->element;

  elt->replacement = var = make_rename_temp (elt->type, "SR");
  DECL_SOURCE_LOCATION (var) = DECL_SOURCE_LOCATION (base);
  DECL_ARTIFICIAL (var) = 1;

  if (TREE_THIS_VOLATILE (elt->type))
    {
      TREE_THIS_VOLATILE (var) = 1;
      TREE_SIDE_EFFECTS (var) = 1;
    }

  if (DECL_NAME (base) && !DECL_IGNORED_P (base))
    {
      char *pretty_name = build_element_name (elt);
      DECL_NAME (var) = get_identifier (pretty_name);
      obstack_free (&sra_obstack, pretty_name);

      SET_DECL_DEBUG_EXPR (var, generate_element_ref (elt));
      DECL_DEBUG_EXPR_IS_FROM (var) = 1;

      DECL_IGNORED_P (var) = 0;
      TREE_NO_WARNING (var) = TREE_NO_WARNING (base);
    }
  else
    {
      DECL_IGNORED_P (var) = 1;
      /* ??? We can't generate any warning that would be meaningful.  */
      TREE_NO_WARNING (var) = 1;
    }

  if (dump_file)
    {
      fputs ("  ", dump_file);
      dump_sra_elt_name (dump_file, elt);
      fputs (" -> ", dump_file);
      print_generic_expr (dump_file, var, dump_flags);
      fputc ('\n', dump_file);
    }
}
/* Make one pass across an element tree deciding whether or not it's
   profitable to instantiate individual leaf scalars.

   PARENT_USES and PARENT_COPIES are the sum of the N_USES and N_COPIES
   fields all the way up the tree.  */

static void
decide_instantiation_1 (struct sra_elt *elt, unsigned int parent_uses,
			unsigned int parent_copies)
{
  if (dump_file && !elt->parent)
    {
      fputs ("Initial instantiation for ", dump_file);
      dump_sra_elt_name (dump_file, elt);
      fputc ('\n', dump_file);
    }

  if (elt->cannot_scalarize)
    return;

  if (elt->is_scalar)
    {
      /* The decision is simple: instantiate if we're used more frequently
	 than the parent needs to be seen as a complete unit.  */
      if (elt->n_uses + elt->n_copies + parent_copies > parent_uses)
	instantiate_element (elt);
    }
  else
    {
      struct sra_elt *c;
      unsigned int this_uses = elt->n_uses + parent_uses;
      unsigned int this_copies = elt->n_copies + parent_copies;

      for (c = elt->children; c ; c = c->sibling)
	decide_instantiation_1 (c, this_uses, this_copies);
    }
}
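
/* A hypothetical example of the heuristic above: if A.X has n_uses == 3
   and n_copies == 1 while A itself is only referenced twice as a whole
   (parent_uses == 2, parent_copies == 0), then 3 + 1 + 0 > 2 and A.X
   gets a replacement variable.  */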
/* Compute the size and number of all instantiated elements below ELT.
   We will only care about this if the size of the complete structure
   fits in a HOST_WIDE_INT, so we don't have to worry about overflow.  */

static unsigned int
sum_instantiated_sizes (struct sra_elt *elt, unsigned HOST_WIDE_INT *sizep)
{
  if (elt->replacement)
    {
      *sizep += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (elt->type));
      return 1;
    }
  else
    {
      struct sra_elt *c;
      unsigned int count = 0;

      for (c = elt->children; c ; c = c->sibling)
	count += sum_instantiated_sizes (c, sizep);

      return count;
    }
}
/* Instantiate fields in ELT->TYPE that are not currently present as
   children of ELT.  */

static void instantiate_missing_elements (struct sra_elt *elt);

static void
instantiate_missing_elements_1 (struct sra_elt *elt, tree child, tree type)
{
  struct sra_elt *sub = lookup_element (elt, child, type, INSERT);
  if (sub->is_scalar)
    {
      if (sub->replacement == NULL)
	instantiate_element (sub);
    }
  else
    instantiate_missing_elements (sub);
}

static void
instantiate_missing_elements (struct sra_elt *elt)
{
  tree type = elt->type;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	tree f;
	for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    instantiate_missing_elements_1 (elt, f, TREE_TYPE (f));
	break;
      }

    case ARRAY_TYPE:
      {
	tree i, max, subtype;

	i = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	max = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	subtype = TREE_TYPE (type);

	while (1)
	  {
	    instantiate_missing_elements_1 (elt, i, subtype);
	    if (tree_int_cst_equal (i, max))
	      break;
	    i = int_const_binop (PLUS_EXPR, i, integer_one_node, true);
	  }

	break;
      }

    case COMPLEX_TYPE:
      type = TREE_TYPE (type);
      instantiate_missing_elements_1 (elt, integer_zero_node, type);
      instantiate_missing_elements_1 (elt, integer_one_node, type);
      break;

    default:
      gcc_unreachable ();
    }
}
/* Make one pass across an element tree deciding whether to perform block
   or element copies.  If we decide on element copies, instantiate all
   elements.  Return true if there are any instantiated sub-elements.  */

static bool
decide_block_copy (struct sra_elt *elt)
{
  struct sra_elt *c;
  bool any_inst;

  /* If scalarization is disabled, respect it.  */
  if (elt->cannot_scalarize)
    {
      elt->use_block_copy = 1;

      if (dump_file)
	{
	  fputs ("Scalarization disabled for ", dump_file);
	  dump_sra_elt_name (dump_file, elt);
	  fputc ('\n', dump_file);
	}

      /* Disable scalarization of sub-elements.  */
      for (c = elt->children; c; c = c->sibling)
	{
	  c->cannot_scalarize = 1;
	  decide_block_copy (c);
	}
      return false;
    }

  /* Don't decide if we've no uses.  */
  if (elt->n_uses == 0 && elt->n_copies == 0)
    ;

  else if (!elt->is_scalar)
    {
      tree size_tree = TYPE_SIZE_UNIT (elt->type);
      bool use_block_copy = true;

      /* Tradeoffs for COMPLEX types pretty much always make it better
	 to go ahead and split the components.  */
      if (TREE_CODE (elt->type) == COMPLEX_TYPE)
	use_block_copy = false;

      /* Don't bother trying to figure out the rest if the structure is
	 so large we can't do easy arithmetic.  This also forces block
	 copies for variable sized structures.  */
      else if (host_integerp (size_tree, 1))
	{
	  unsigned HOST_WIDE_INT full_size, inst_size = 0;
	  unsigned int max_size;

	  /* If the sra-max-structure-size parameter is 0, then the
	     user has not overridden the parameter and we can choose a
	     sensible default.  */
	  max_size = SRA_MAX_STRUCTURE_SIZE
		     ? SRA_MAX_STRUCTURE_SIZE
		     : MOVE_RATIO * UNITS_PER_WORD;

	  full_size = tree_low_cst (size_tree, 1);

	  /* ??? What to do here.  If there are two fields, and we've only
	     instantiated one, then instantiating the other is clearly a win.
	     If there are a large number of fields then the size of the copy
	     is much more of a factor.  */

	  /* If the structure is small, and we've made copies, go ahead
	     and instantiate, hoping that the copies will go away.  */
	  if (full_size <= max_size
	      && elt->n_copies > elt->n_uses)
	    use_block_copy = false;
	  else
	    {
	      sum_instantiated_sizes (elt, &inst_size);

	      if (inst_size * 100 >= full_size * SRA_FIELD_STRUCTURE_RATIO)
		use_block_copy = false;
	    }

	  /* In order to avoid block copy, we have to be able to instantiate
	     all elements of the type.  See if this is possible.  */
	  if (!use_block_copy
	      && (!can_completely_scalarize_p (elt)
		  || !type_can_instantiate_all_elements (elt->type)))
	    use_block_copy = true;
	}
      elt->use_block_copy = use_block_copy;

      if (dump_file)
	{
	  fprintf (dump_file, "Using %s for ",
		   use_block_copy ? "block-copy" : "element-copy");
	  dump_sra_elt_name (dump_file, elt);
	  fputc ('\n', dump_file);
	}

      if (!use_block_copy)
	{
	  instantiate_missing_elements (elt);
	  return true;
	}
    }

  any_inst = elt->replacement != NULL;

  for (c = elt->children; c ; c = c->sibling)
    any_inst |= decide_block_copy (c);

  return any_inst;
}
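
/* A sketch of the size arithmetic above, under assumed target values:
   with --param sra-max-structure-size left at 0, MOVE_RATIO == 4 and
   UNITS_PER_WORD == 8, MAX_SIZE is 32 bytes.  A 24-byte structure that
   is copied more often than it is used then qualifies for element
   copies, while a 40-byte one must instead satisfy the
   SRA_FIELD_STRUCTURE_RATIO test on its instantiated size.  */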
/* Entry point to phase 3.  Instantiate scalar replacement variables.  */

static void
decide_instantiations (void)
{
  unsigned int i;
  bool cleared_any;
  bitmap_head done_head;
  bitmap_iterator bi;

  /* We cannot clear bits from a bitmap we're iterating over,
     so save up all the bits to clear until the end.  */
  bitmap_initialize (&done_head, &bitmap_default_obstack);
  cleared_any = false;

  EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
    {
      tree var = referenced_var (i);
      struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
      if (elt)
	{
	  decide_instantiation_1 (elt, 0, 0);
	  if (!decide_block_copy (elt))
	    elt = NULL;
	}
      if (!elt)
	{
	  bitmap_set_bit (&done_head, i);
	  cleared_any = true;
	}
    }

  if (cleared_any)
    {
      bitmap_and_compl_into (sra_candidates, &done_head);
      bitmap_and_compl_into (needs_copy_in, &done_head);
    }
  bitmap_clear (&done_head);

  mark_set_for_renaming (sra_candidates);

  if (dump_file)
    fputc ('\n', dump_file);
}
/* Phase Four: Update the function to match the replacements created.  */

/* Mark all the variables in V_MAY_DEF or V_MUST_DEF operands for STMT for
   renaming.  This becomes necessary when we modify all of a non-scalar.  */

static void
mark_all_v_defs_1 (tree stmt)
{
  tree sym;
  ssa_op_iter iter;

  update_stmt_if_modified (stmt);

  FOR_EACH_SSA_TREE_OPERAND (sym, stmt, iter, SSA_OP_ALL_VIRTUALS)
    {
      if (TREE_CODE (sym) == SSA_NAME)
	sym = SSA_NAME_VAR (sym);
      mark_sym_for_renaming (sym);
    }
}

/* Mark all the variables in virtual operands in all the statements in
   LIST for renaming.  */

static void
mark_all_v_defs (tree list)
{
  if (TREE_CODE (list) != STATEMENT_LIST)
    mark_all_v_defs_1 (list);
  else
    {
      tree_stmt_iterator i;
      for (i = tsi_start (list); !tsi_end_p (i); tsi_next (&i))
	mark_all_v_defs_1 (tsi_stmt (i));
    }
}
/* Build a single level component reference to ELT rooted at BASE.  */

static tree
generate_one_element_ref (struct sra_elt *elt, tree base)
{
  switch (TREE_CODE (TREE_TYPE (base)))
    {
    case RECORD_TYPE:
      {
	tree field = elt->element;

	/* Watch out for compatible records with differing field lists.  */
	if (DECL_FIELD_CONTEXT (field) != TYPE_MAIN_VARIANT (TREE_TYPE (base)))
	  field = find_compatible_field (TREE_TYPE (base), field);

	return build (COMPONENT_REF, elt->type, base, field, NULL);
      }

    case ARRAY_TYPE:
      return build (ARRAY_REF, elt->type, base, elt->element, NULL, NULL);

    case COMPLEX_TYPE:
      if (elt->element == integer_zero_node)
	return build (REALPART_EXPR, elt->type, base);
      else
	return build (IMAGPART_EXPR, elt->type, base);

    default:
      gcc_unreachable ();
    }
}

/* Build a full component reference to ELT rooted at its native variable.  */

static tree
generate_element_ref (struct sra_elt *elt)
{
  if (elt->parent)
    return generate_one_element_ref (elt, generate_element_ref (elt->parent));
  else
    return elt->element;
}
/* Generate a set of assignment statements in *LIST_P to copy all
   instantiated elements under ELT to or from the equivalent structure
   rooted at EXPR.  COPY_OUT controls the direction of the copy, with
   true meaning to copy out of EXPR into ELT.  */

static void
generate_copy_inout (struct sra_elt *elt, bool copy_out, tree expr,
		     tree *list_p)
{
  struct sra_elt *c;
  tree t;

  if (!copy_out && TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == COMPLEX_TYPE)
    {
      tree r, i;

      c = lookup_element (elt, integer_zero_node, NULL, NO_INSERT);
      r = c->replacement;
      c = lookup_element (elt, integer_one_node, NULL, NO_INSERT);
      i = c->replacement;

      t = build (COMPLEX_EXPR, elt->type, r, i);
      t = build (MODIFY_EXPR, void_type_node, expr, t);
      SSA_NAME_DEF_STMT (expr) = t;
      append_to_statement_list (t, list_p);
    }
  else if (elt->replacement)
    {
      if (copy_out)
	t = build (MODIFY_EXPR, void_type_node, elt->replacement, expr);
      else
	t = build (MODIFY_EXPR, void_type_node, expr, elt->replacement);
      append_to_statement_list (t, list_p);
    }
  else
    {
      for (c = elt->children; c ; c = c->sibling)
	{
	  t = generate_one_element_ref (c, unshare_expr (expr));
	  generate_copy_inout (c, copy_out, t, list_p);
	}
    }
}
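
/* For a fully instantiated structure P with members X and Y (names
   illustrative), the COPY_OUT == true direction generates

	p$x = p.x;
	p$y = p.y;

   while COPY_OUT == false generates the stores back into P.  The
   SSA_NAME special case instead rebuilds a complex value in a single
   statement: expr = COMPLEX_EXPR <z$real, z$imag>.  */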
/* Generate a set of assignment statements in *LIST_P to copy all instantiated
   elements under SRC to their counterparts under DST.  There must be a 1-1
   correspondence of instantiated elements.  */

static void
generate_element_copy (struct sra_elt *dst, struct sra_elt *src, tree *list_p)
{
  struct sra_elt *dc, *sc;

  for (dc = dst->children; dc ; dc = dc->sibling)
    {
      sc = lookup_element (src, dc->element, NULL, NO_INSERT);
      gcc_assert (sc);
      generate_element_copy (dc, sc, list_p);
    }

  if (dst->replacement)
    {
      tree t;

      gcc_assert (src->replacement);

      t = build (MODIFY_EXPR, void_type_node, dst->replacement,
		 src->replacement);
      append_to_statement_list (t, list_p);
    }
}
/* Generate a set of assignment statements in *LIST_P to zero all instantiated
   elements under ELT.  In addition, do not assign to elements that have been
   marked VISITED but do reset the visited flag; this allows easy coordination
   with generate_element_init.  */

static void
generate_element_zero (struct sra_elt *elt, tree *list_p)
{
  struct sra_elt *c;

  if (elt->visited)
    {
      elt->visited = false;
      return;
    }

  for (c = elt->children; c ; c = c->sibling)
    generate_element_zero (c, list_p);

  if (elt->replacement)
    {
      tree t;

      gcc_assert (elt->is_scalar);
      t = fold_convert (elt->type, integer_zero_node);

      t = build (MODIFY_EXPR, void_type_node, elt->replacement, t);
      append_to_statement_list (t, list_p);
    }
}
/* Generate an assignment VAR = INIT, where INIT may need gimplification.
   Add the result to *LIST_P.  */

static void
generate_one_element_init (tree var, tree init, tree *list_p)
{
  /* The replacement can be almost arbitrarily complex.  Gimplify.  */
  tree stmt = build (MODIFY_EXPR, void_type_node, var, init);
  gimplify_and_add (stmt, list_p);
}
/* Generate a set of assignment statements in *LIST_P to set all instantiated
   elements under ELT with the contents of the initializer INIT.  In addition,
   mark all assigned elements VISITED; this allows easy coordination with
   generate_element_zero.  Return false if we found a case we couldn't
   handle.  */

static bool
generate_element_init_1 (struct sra_elt *elt, tree init, tree *list_p)
{
  bool result = true;
  enum tree_code init_code;
  struct sra_elt *sub;
  tree t;
  unsigned HOST_WIDE_INT idx;
  tree value, purpose;

  /* We can be passed DECL_INITIAL of a static variable.  It might have a
     conversion, which we strip off here.  */
  STRIP_USELESS_TYPE_CONVERSION (init);
  init_code = TREE_CODE (init);

  if (elt->is_scalar)
    {
      if (elt->replacement)
	{
	  generate_one_element_init (elt->replacement, init, list_p);
	  elt->visited = true;
	}
      return result;
    }

  switch (init_code)
    {
    case COMPLEX_CST:
    case COMPLEX_EXPR:
      for (sub = elt->children; sub ; sub = sub->sibling)
	{
	  if (sub->element == integer_zero_node)
	    t = (init_code == COMPLEX_EXPR
		 ? TREE_OPERAND (init, 0) : TREE_REALPART (init));
	  else
	    t = (init_code == COMPLEX_EXPR
		 ? TREE_OPERAND (init, 1) : TREE_IMAGPART (init));
	  result &= generate_element_init_1 (sub, t, list_p);
	}
      break;

    case CONSTRUCTOR:
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), idx, purpose, value)
	{
	  if (TREE_CODE (purpose) == RANGE_EXPR)
	    {
	      tree lower = TREE_OPERAND (purpose, 0);
	      tree upper = TREE_OPERAND (purpose, 1);

	      while (1)
		{
		  sub = lookup_element (elt, lower, NULL, NO_INSERT);
		  if (sub != NULL)
		    result &= generate_element_init_1 (sub, value, list_p);
		  if (tree_int_cst_equal (lower, upper))
		    break;
		  lower = int_const_binop (PLUS_EXPR, lower,
					   integer_one_node, true);
		}
	    }
	  else
	    {
	      sub = lookup_element (elt, purpose, NULL, NO_INSERT);
	      if (sub != NULL)
		result &= generate_element_init_1 (sub, value, list_p);
	    }
	}
      break;

    default:
      elt->visited = true;
      result = false;
    }

  return result;
}
/* A wrapper function for generate_element_init_1 that handles cleanup after
   gimplification.  */

static bool
generate_element_init (struct sra_elt *elt, tree init, tree *list_p)
{
  bool ret;

  push_gimplify_context ();
  ret = generate_element_init_1 (elt, init, list_p);
  pop_gimplify_context (NULL);

  /* The replacement can expose previously unreferenced variables.  */
  if (ret && *list_p)
    {
      tree_stmt_iterator i;

      for (i = tsi_start (*list_p); !tsi_end_p (i); tsi_next (&i))
	find_new_referenced_vars (tsi_stmt_ptr (i));
    }

  return ret;
}
/* Insert STMT on all the outgoing edges out of BB.  Note that if BB
   has more than one edge, STMT will be replicated for each edge.  Also,
   abnormal edges will be ignored.  */

void
insert_edge_copies (tree stmt, basic_block bb)
{
  edge e;
  edge_iterator ei;
  bool first_copy;

  first_copy = true;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      /* We don't need to insert copies on abnormal edges.  The
	 value of the scalar replacement is not guaranteed to
	 be valid through an abnormal edge.  */
      if (!(e->flags & EDGE_ABNORMAL))
	{
	  if (first_copy)
	    {
	      bsi_insert_on_edge (e, stmt);
	      first_copy = false;
	    }
	  else
	    bsi_insert_on_edge (e, unsave_expr_now (stmt));
	}
    }
}
/* Helper function to insert LIST before BSI, and set up line number info.  */

static void
sra_insert_before (block_stmt_iterator *bsi, tree list)
{
  tree stmt = bsi_stmt (*bsi);

  if (EXPR_HAS_LOCATION (stmt))
    annotate_all_with_locus (&list, EXPR_LOCATION (stmt));
  bsi_insert_before (bsi, list, BSI_SAME_STMT);
}

/* Similarly, but insert after BSI.  Handles insertion onto edges as well.  */

static void
sra_insert_after (block_stmt_iterator *bsi, tree list)
{
  tree stmt = bsi_stmt (*bsi);

  if (EXPR_HAS_LOCATION (stmt))
    annotate_all_with_locus (&list, EXPR_LOCATION (stmt));

  if (stmt_ends_bb_p (stmt))
    insert_edge_copies (list, bsi->bb);
  else
    bsi_insert_after (bsi, list, BSI_SAME_STMT);
}

/* Similarly, but replace the statement at BSI.  */

static void
sra_replace (block_stmt_iterator *bsi, tree list)
{
  sra_insert_before (bsi, list);
  bsi_remove (bsi);
  if (bsi_end_p (*bsi))
    *bsi = bsi_last (bsi->bb);
  else
    bsi_prev (bsi);
}
/* Scalarize a USE.  To recap, this is either a simple reference to ELT,
   if elt is scalar, or some occurrence of ELT that requires a complete
   aggregate.  IS_OUTPUT is true if ELT is being modified.  */

static void
scalarize_use (struct sra_elt *elt, tree *expr_p, block_stmt_iterator *bsi,
	       bool is_output)
{
  tree list = NULL, stmt = bsi_stmt (*bsi);

  if (elt->replacement)
    {
      /* If we have a replacement, then updating the reference is as
	 simple as modifying the existing statement in place.  */
      if (is_output)
	mark_all_v_defs (stmt);
      *expr_p = elt->replacement;
      update_stmt (stmt);
    }
  else
    {
      /* Otherwise we need some copies.  If ELT is being read, then we want
	 to store all (modified) sub-elements back into the structure before
	 the reference takes place.  If ELT is being written, then we want to
	 load the changed values back into our shadow variables.  */
      /* ??? We don't check modified for reads, we just always write all of
	 the values.  We should be able to record the SSA number of the VOP
	 for which the values were last read.  If that number matches the
	 SSA number of the VOP in the current statement, then we needn't
	 emit an assignment.  This would also eliminate double writes when
	 a structure is passed as more than one argument to a function call.
	 This optimization would be most effective if sra_walk_function
	 processed the blocks in dominator order.  */

      generate_copy_inout (elt, is_output, generate_element_ref (elt), &list);
      if (list == NULL)
	return;
      mark_all_v_defs (list);
      if (is_output)
	sra_insert_after (bsi, list);
      else
	sra_insert_before (bsi, list);
    }
}
/* Scalarize a COPY.  To recap, this is an assignment statement between
   two scalarizable references, LHS_ELT and RHS_ELT.  */

static void
scalarize_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
		block_stmt_iterator *bsi)
{
  tree list, stmt;

  if (lhs_elt->replacement && rhs_elt->replacement)
    {
      /* If we have two scalar operands, modify the existing statement.  */
      stmt = bsi_stmt (*bsi);

      /* See the commentary in sra_walk_function concerning
	 RETURN_EXPR, and why we should never see one here.  */
      gcc_assert (TREE_CODE (stmt) == MODIFY_EXPR);

      TREE_OPERAND (stmt, 0) = lhs_elt->replacement;
      TREE_OPERAND (stmt, 1) = rhs_elt->replacement;
      update_stmt (stmt);
    }
  else if (lhs_elt->use_block_copy || rhs_elt->use_block_copy)
    {
      /* If either side requires a block copy, then sync the RHS back
	 to the original structure, leave the original assignment
	 statement (which will perform the block copy), then load the
	 LHS values out of its now-updated original structure.  */
      /* ??? Could perform a modified pair-wise element copy.  That
	 would at least allow those elements that are instantiated in
	 both structures to be optimized well.  */

      list = NULL;
      generate_copy_inout (rhs_elt, false,
			   generate_element_ref (rhs_elt), &list);
      if (list)
	{
	  mark_all_v_defs (list);
	  sra_insert_before (bsi, list);
	}

      list = NULL;
      generate_copy_inout (lhs_elt, true,
			   generate_element_ref (lhs_elt), &list);
      if (list)
	{
	  mark_all_v_defs (list);
	  sra_insert_after (bsi, list);
	}
    }
  else
    {
      /* Otherwise both sides must be fully instantiated.  In which
	 case perform pair-wise element assignments and replace the
	 original block copy statement.  */

      stmt = bsi_stmt (*bsi);
      mark_all_v_defs (stmt);

      list = NULL;
      generate_element_copy (lhs_elt, rhs_elt, &list);
      gcc_assert (list);
      mark_all_v_defs (list);
      sra_replace (bsi, list);
    }
}
/* Scalarize an INIT.  To recap, this is an assignment to a scalarizable
   reference from some form of constructor: CONSTRUCTOR, COMPLEX_CST or
   COMPLEX_EXPR.  If RHS is NULL, it should be treated as an empty
   CONSTRUCTOR.  */

static void
scalarize_init (struct sra_elt *lhs_elt, tree rhs, block_stmt_iterator *bsi)
{
  bool result = true;
  tree list = NULL;

  /* Generate initialization statements for all members extant in the RHS.  */
  if (rhs)
    {
      /* Unshare the expression just in case this is from a decl's initial.  */
      rhs = unshare_expr (rhs);
      result = generate_element_init (lhs_elt, rhs, &list);
    }

  /* CONSTRUCTOR is defined such that any member not mentioned is assigned
     a zero value.  Initialize the rest of the instantiated elements.  */
  generate_element_zero (lhs_elt, &list);

  if (!result)
    {
      /* If we failed to convert the entire initializer, then we must
	 leave the structure assignment in place and must load values
	 from the structure into the slots for which we did not find
	 constants.  The easiest way to do this is to generate a complete
	 copy-out, and then follow that with the constant assignments
	 that we were able to build.  DCE will clean things up.  */
      tree list0 = NULL;
      generate_copy_inout (lhs_elt, true, generate_element_ref (lhs_elt),
			   &list0);
      append_to_statement_list (list, &list0);
      list = list0;
    }

  if (lhs_elt->use_block_copy || !result)
    {
      /* Since LHS is not fully instantiated, we must leave the structure
	 assignment in place.  Treating this case differently from a USE
	 exposes constants to later optimizations.  */
      if (list)
	{
	  mark_all_v_defs (list);
	  sra_insert_after (bsi, list);
	}
    }
  else
    {
      /* The LHS is fully instantiated.  The list of initializations
	 replaces the original structure assignment.  */
      gcc_assert (list);
      mark_all_v_defs (bsi_stmt (*bsi));
      mark_all_v_defs (list);
      sra_replace (bsi, list);
    }
}
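
/* A hypothetical example: for "z = __complex__ (1.0, 2.0)" with Z fully
   instantiated, generate_element_init produces

	z$real = 1.0;
	z$imag = 2.0;

   and the list replaces the original assignment.  For a partial
   CONSTRUCTOR such as "s = { .x = 1 }", generate_element_zero
   additionally emits s$y = 0 for the unmentioned member.  */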
/* A subroutine of scalarize_ldst called via walk_tree.  Set TREE_NO_TRAP
   on all INDIRECT_REFs.  */

static tree
mark_notrap (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      TREE_THIS_NOTRAP (t) = 1;
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;

  return NULL;
}
/* Scalarize a LDST.  To recap, this is an assignment between one scalarizable
   reference ELT and one non-scalarizable reference OTHER.  IS_OUTPUT is true
   if ELT is on the left-hand side.  */

static void
scalarize_ldst (struct sra_elt *elt, tree other,
		block_stmt_iterator *bsi, bool is_output)
{
  /* Shouldn't have gotten called for a scalar.  */
  gcc_assert (!elt->replacement);

  if (elt->use_block_copy)
    {
      /* Since ELT is not fully instantiated, we have to leave the
	 block copy in place.  Treat this as a USE.  */
      scalarize_use (elt, NULL, bsi, is_output);
    }
  else
    {
      /* The interesting case is when ELT is fully instantiated.  In this
	 case we can have each element stored/loaded directly to/from the
	 corresponding slot in OTHER.  This avoids a block copy.  */

      tree list = NULL, stmt = bsi_stmt (*bsi);

      mark_all_v_defs (stmt);
      generate_copy_inout (elt, is_output, other, &list);
      mark_all_v_defs (list);
      gcc_assert (list);

      /* Preserve EH semantics.  */
      if (stmt_ends_bb_p (stmt))
	{
	  tree_stmt_iterator tsi;
	  tree first;

	  /* Extract the first statement from LIST.  */
	  tsi = tsi_start (list);
	  first = tsi_stmt (tsi);
	  tsi_delink (&tsi);

	  /* Replace the old statement with this new representative.  */
	  bsi_replace (bsi, first, true);

	  if (!tsi_end_p (tsi))
	    {
	      /* If any reference would trap, then they all would.  And more
		 to the point, the first would.  Therefore none of the rest
		 will trap since the first didn't.  Indicate this by
		 iterating over the remaining statements and set
		 TREE_THIS_NOTRAP in all INDIRECT_REFs.  */
	      do
		{
		  walk_tree (tsi_stmt_ptr (tsi), mark_notrap, NULL, NULL);
		  tsi_next (&tsi);
		}
	      while (!tsi_end_p (tsi));

	      insert_edge_copies (list, bsi->bb);
	    }
	}
      else
	sra_replace (bsi, list);
    }
}
/* Generate initializations for all scalarizable parameters.  */

static void
scalarize_parms (void)
{
  tree list = NULL;
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (needs_copy_in, 0, i, bi)
    {
      tree var = referenced_var (i);
      struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
      generate_copy_inout (elt, true, var, &list);
    }

  if (list)
    {
      insert_edge_copies (list, ENTRY_BLOCK_PTR);
      mark_all_v_defs (list);
    }
}
/* Entry point to phase 4.  Update the function to match replacements.  */

static void
scalarize_function (void)
{
  static const struct sra_walk_fns fns = {
    scalarize_use, scalarize_copy, scalarize_init, scalarize_ldst, false
  };

  sra_walk_function (&fns);
  scalarize_parms ();
  bsi_commit_edge_inserts ();
}
/* Debug helper function.  Print ELT in a nice human-readable format.  */

static void
dump_sra_elt_name (FILE *f, struct sra_elt *elt)
{
  if (elt->parent && TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
    {
      fputs (elt->element == integer_zero_node ? "__real__ " : "__imag__ ", f);
      dump_sra_elt_name (f, elt->parent);
    }
  else
    {
      if (elt->parent)
	dump_sra_elt_name (f, elt->parent);
      if (DECL_P (elt->element))
	{
	  if (TREE_CODE (elt->element) == FIELD_DECL)
	    fputc ('.', f);
	  print_generic_expr (f, elt->element, dump_flags);
	}
      else
	fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC "]",
		 TREE_INT_CST_LOW (elt->element));
    }
}

/* Likewise, but callable from the debugger.  */

void
debug_sra_elt_name (struct sra_elt *elt)
{
  dump_sra_elt_name (stderr, elt);
  fputc ('\n', stderr);
}
/* Initialize the type decomposition and instantiation caches.  */

static void
sra_init_cache (void)
{
  if (sra_type_decomp_cache)
    return;

  sra_type_decomp_cache = BITMAP_ALLOC (NULL);
  sra_type_inst_cache = BITMAP_ALLOC (NULL);
}
/* Main entry point.  */

static void
tree_sra (void)
{
  /* Initialize local variables.  */
  gcc_obstack_init (&sra_obstack);
  sra_candidates = BITMAP_ALLOC (NULL);
  needs_copy_in = BITMAP_ALLOC (NULL);
  sra_init_cache ();
  sra_map = htab_create (101, sra_elt_hash, sra_elt_eq, NULL);

  /* Scan.  If we find anything, instantiate and scalarize.  */
  if (find_candidates_for_sra ())
    {
      scan_function ();
      decide_instantiations ();
      scalarize_function ();
    }

  /* Free allocated memory.  */
  htab_delete (sra_map);
  sra_map = NULL;
  BITMAP_FREE (sra_candidates);
  BITMAP_FREE (needs_copy_in);
  BITMAP_FREE (sra_type_decomp_cache);
  BITMAP_FREE (sra_type_inst_cache);
  obstack_free (&sra_obstack, NULL);
}

static bool
gate_sra (void)
{
  return flag_tree_sra != 0;
}
struct tree_opt_pass pass_sra =
{
  "sra",				/* name */
  gate_sra,				/* gate */
  tree_sra,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_SRA,				/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_update_ssa
    | TODO_ggc_collect | TODO_verify_ssa,	/* todo_flags_finish */
  0					/* letter */
};