1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2 references into scalar references, exposing them to the scalar optimizers.
4 Copyright (C) 2003, 2004 Free Software Foundation, Inc.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published by the
11 Free Software Foundation; either version 2, or (at your option) any later version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING. If not, write to the Free
21 Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */
26 #include "coretypes.h"
32 /* These RTL headers are needed for basic-block.h. */
35 #include "hard-reg-set.h"
36 #include "basic-block.h"
37 #include "diagnostic.h"
38 #include "langhooks.h"
39 #include "tree-inline.h"
40 #include "tree-flow.h"
41 #include "tree-gimple.h"
42 #include "tree-dump.h"
43 #include "tree-pass.h"
49 /* expr.h is needed for MOVE_RATIO. */
53 /* The object of this pass is to replace a non-addressable aggregate with a
54 set of independent variables. Most of the time, all of these variables
55 will be scalars. But a secondary objective is to break up larger
56 aggregates into smaller aggregates. In the process we may find that some
57 bits of the larger aggregate can be deleted as unreferenced.
59 This substitution is done globally. More localized substitutions would
60 be the purview of a load-store motion pass.
62 The optimization proceeds in phases:
64 (1) Identify variables that have types that are candidates for
67 (2) Scan the function looking for the ways these variables are used.
68 In particular we're interested in the number of times a variable
69 (or member) is needed as a complete unit, and the number of times
70 a variable (or member) is copied.
72 (3) Based on the usage profile, instantiate substitution variables.
74 (4) Scan the function making replacements.
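   As an illustrative sketch (the types and names here are invented, not
   taken from any test case), given

	struct point { int x; int y; } p;
	p.x = 3;
	p.y = 4;
	return p.x + p.y;

   the pass may replace P with two scalar temporaries (shown with their
   internal names), so that the body becomes

	int p$x, p$y;
	p$x = 3;
	p$y = 4;
	return p$x + p$y;  */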
78 /* The set of aggregate variables that are candidates for scalarization. */
79 static bitmap sra_candidates;
81 /* Set of scalarizable PARM_DECLs that need copy-in operations at the
82 beginning of the function. */
83 static bitmap needs_copy_in;
85 /* Sets of bit pairs that cache type decomposition and instantiation. */
86 static bitmap sra_type_decomp_cache;
87 static bitmap sra_type_inst_cache;
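/* A note on the encoding (inferred from the cache tests below): each
   type occupies a pair of bits indexed by 2 * TYPE_UID of its main
   variant.  In each cache, bit 2*UID set records a positive answer,
   bit 2*UID+1 set records a negative answer, and neither bit set means
   the type has not been examined yet.  */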
89 /* One of these structures is created for each candidate aggregate
90 and each (accessed) member of such an aggregate. */
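/* For instance (a hypothetical candidate, not from the sources): with

	struct inner { float b; };
	struct outer { int a; struct inner s; } x;

   the accessed parts of X form a tree of sra_elt nodes: a root for X,
   children for X.A and X.S, and a grandchild for X.S.B.  */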
93 /* A tree of the elements. Used when we want to traverse everything. */
94 struct sra_elt *parent;
95 struct sra_elt *children;
96 struct sra_elt *sibling;
98 /* If this element is a root, then this is the VAR_DECL. If this is
99 a sub-element, this is some token used to identify the reference.
100 In the case of COMPONENT_REF, this is the FIELD_DECL. In the case
101 of an ARRAY_REF, this is the (constant) index. In the case of a
102 complex number, this is a zero or one. */
105 /* The type of the element. */
108 /* A VAR_DECL, for any sub-element we've decided to replace. */
111 /* The number of times the element is referenced as a whole. I.e.
112 given "a.b.c", this would be incremented for C, but not for A or B. */
115 /* The number of times the element is copied to or from another
116 scalarizable element. */
117 unsigned int n_copies;
119 /* True if TYPE is scalar. */
122 /* True if we saw something about this element that prevents scalarization,
123 such as non-constant indexing. */
124 bool cannot_scalarize;
126 /* True if we've decided that structure-to-structure assignment
127 should happen via memcpy and not per-element. */
130 /* A flag for use with/after random access traversals. */
134 /* Random access to the child of a parent is performed by hashing.
135 This prevents quadratic behavior, and allows SRA to function
136 reasonably on larger records. */
137 static htab_t sra_map;
139 /* All structures are allocated out of the following obstack. */
140 static struct obstack sra_obstack;
142 /* Debugging functions. */
143 static void dump_sra_elt_name (FILE *, struct sra_elt *);
144 extern void debug_sra_elt_name (struct sra_elt *);
147 /* Return true if DECL is an SRA candidate. */
150 is_sra_candidate_decl (tree decl)
152 return DECL_P (decl) && bitmap_bit_p (sra_candidates, var_ann (decl)->uid);
155 /* Return true if TYPE is a scalar type. */
158 is_sra_scalar_type (tree type)
160 enum tree_code code = TREE_CODE (type);
161 return (code == INTEGER_TYPE || code == REAL_TYPE || code == VECTOR_TYPE
162 || code == ENUMERAL_TYPE || code == BOOLEAN_TYPE
163 || code == CHAR_TYPE || code == POINTER_TYPE || code == OFFSET_TYPE
164 || code == REFERENCE_TYPE);
167 /* Return true if TYPE can be decomposed into a set of independent variables.
169 Note that this doesn't imply that all elements of TYPE can be
170 instantiated, just that if we decide to break up the type into
171 separate pieces that it can be done. */
174 type_can_be_decomposed_p (tree type)
176 unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
179 /* Avoid searching the same type twice. */
180 if (bitmap_bit_p (sra_type_decomp_cache, cache+0))
182 if (bitmap_bit_p (sra_type_decomp_cache, cache+1))
185 /* The type must have a definite non-zero size. */
186 if (TYPE_SIZE (type) == NULL || integer_zerop (TYPE_SIZE (type)))
189 /* The type must be a non-union aggregate. */
190 switch (TREE_CODE (type))
194 bool saw_one_field = false;
196 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
197 if (TREE_CODE (t) == FIELD_DECL)
199 /* Reject incorrectly represented bit fields. */
200 if (DECL_BIT_FIELD (t)
201 && (tree_low_cst (DECL_SIZE (t), 1)
202 != TYPE_PRECISION (TREE_TYPE (t))))
205 saw_one_field = true;
208 /* Record types must have at least one field. */
215 /* Array types must have a fixed lower and upper bound. */
216 t = TYPE_DOMAIN (type);
219 if (TYPE_MIN_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MIN_VALUE (t)))
221 if (TYPE_MAX_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MAX_VALUE (t)))
232 bitmap_set_bit (sra_type_decomp_cache, cache+0);
236 bitmap_set_bit (sra_type_decomp_cache, cache+1);
240 /* Return true if DECL can be decomposed into a set of independent
241 (though not necessarily scalar) variables. */
244 decl_can_be_decomposed_p (tree var)
246 /* Early out for scalars. */
247 if (is_sra_scalar_type (TREE_TYPE (var)))
250 /* The variable must not be aliased. */
251 if (!is_gimple_non_addressable (var))
253 if (dump_file && (dump_flags & TDF_DETAILS))
255 fprintf (dump_file, "Cannot scalarize variable ");
256 print_generic_expr (dump_file, var, dump_flags);
257 fprintf (dump_file, " because it must live in memory\n");
262 /* The variable must not be volatile. */
263 if (TREE_THIS_VOLATILE (var))
265 if (dump_file && (dump_flags & TDF_DETAILS))
267 fprintf (dump_file, "Cannot scalarize variable ");
268 print_generic_expr (dump_file, var, dump_flags);
269 fprintf (dump_file, " because it is declared volatile\n");
274 /* We must be able to decompose the variable's type. */
275 if (!type_can_be_decomposed_p (TREE_TYPE (var)))
277 if (dump_file && (dump_flags & TDF_DETAILS))
279 fprintf (dump_file, "Cannot scalarize variable ");
280 print_generic_expr (dump_file, var, dump_flags);
281 fprintf (dump_file, " because its type cannot be decomposed\n");
289 /* Return true if TYPE can be *completely* decomposed into scalars. */
292 type_can_instantiate_all_elements (tree type)
294 if (is_sra_scalar_type (type))
296 if (!type_can_be_decomposed_p (type))
299 switch (TREE_CODE (type))
303 unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
306 if (bitmap_bit_p (sra_type_inst_cache, cache+0))
308 if (bitmap_bit_p (sra_type_inst_cache, cache+1))
311 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
312 if (TREE_CODE (f) == FIELD_DECL)
314 if (!type_can_instantiate_all_elements (TREE_TYPE (f)))
316 bitmap_set_bit (sra_type_inst_cache, cache+1);
321 bitmap_set_bit (sra_type_inst_cache, cache+0);
326 return type_can_instantiate_all_elements (TREE_TYPE (type));
336 /* Test whether ELT or some sub-element cannot be scalarized. */
339 can_completely_scalarize_p (struct sra_elt *elt)
343 if (elt->cannot_scalarize)
346 for (c = elt->children; c ; c = c->sibling)
347 if (!can_completely_scalarize_p (c))
354 /* A simplified tree hashing algorithm that only handles the types of
355 trees we expect to find in sra_elt->element. */
358 sra_hash_tree (tree t)
360 switch (TREE_CODE (t))
368 return TREE_INT_CST_LOW (t) ^ TREE_INT_CST_HIGH (t);
374 /* Hash function for type struct sra_elt. */
377 sra_elt_hash (const void *x)
379 const struct sra_elt *e = x;
380 const struct sra_elt *p;
383 h = sra_hash_tree (e->element);
385 /* Take into account everything back up the chain. Given that chain
386 lengths are rarely very long, this should be acceptable. If we
387 truly identify this as a performance problem, it should work to
388 hash the pointer value "e->parent". */
389 for (p = e->parent; p ; p = p->parent)
390 h = (h * 65521) ^ sra_hash_tree (p->element);
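	/* (65521 is the largest prime below 2^16; being odd, the multiply
	   is invertible modulo 2^32, so folding in each parent loses no
	   information from the accumulated hash.)  */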
395 /* Equality function for type struct sra_elt. */
398 sra_elt_eq (const void *x, const void *y)
400 const struct sra_elt *a = x;
401 const struct sra_elt *b = y;
403 if (a->parent != b->parent)
406 /* All the field/decl stuff is unique. */
407 if (a->element == b->element)
410 /* The only thing left is integer equality. */
411 if (TREE_CODE (a->element) == INTEGER_CST
412 && TREE_CODE (b->element) == INTEGER_CST)
413 return tree_int_cst_equal (a->element, b->element);
418 /* Create or return the SRA_ELT structure for CHILD in PARENT. PARENT
419 may be null, in which case CHILD must be a DECL. */
421 static struct sra_elt *
422 lookup_element (struct sra_elt *parent, tree child, tree type,
423 enum insert_option insert)
425 struct sra_elt dummy;
426 struct sra_elt **slot;
429 dummy.parent = parent;
430 dummy.element = child;
432 slot = (struct sra_elt **) htab_find_slot (sra_map, &dummy, insert);
433 if (!slot && insert == NO_INSERT)
437 if (!elt && insert == INSERT)
439 *slot = elt = obstack_alloc (&sra_obstack, sizeof (*elt));
440 memset (elt, 0, sizeof (*elt));
442 elt->parent = parent;
443 elt->element = child;
445 elt->is_scalar = is_sra_scalar_type (type);
449 elt->sibling = parent->children;
450 parent->children = elt;
453 /* If this is a parameter, then if we want to scalarize, we have
454 one copy from the true function parameter. Count it now. */
455 if (TREE_CODE (child) == PARM_DECL)
458 bitmap_set_bit (needs_copy_in, var_ann (child)->uid);
465 /* Return true if the ARRAY_REF in EXPR is a constant, in-bounds access. */
468 is_valid_const_index (tree expr)
470 tree dom, t, index = TREE_OPERAND (expr, 1);
472 if (TREE_CODE (index) != INTEGER_CST)
475 /* Watch out for stupid user tricks, indexing outside the array.
477 Careful, we're not called only on scalarizable types, so do not
478 assume constant array bounds. We needn't do anything with such
479 cases, since they'll be referring to objects that we should have
480 already rejected for scalarization, so returning false is fine. */
482 dom = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (expr, 0)));
486 t = TYPE_MIN_VALUE (dom);
487 if (!t || TREE_CODE (t) != INTEGER_CST)
489 if (tree_int_cst_lt (index, t))
492 t = TYPE_MAX_VALUE (dom);
493 if (!t || TREE_CODE (t) != INTEGER_CST)
495 if (tree_int_cst_lt (t, index))
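/* For example (hypothetical declarations): given "int a[4]", the
   reference a[2] passes all of the checks above, a[7] is constant but
   out of bounds, and a[i] fails the INTEGER_CST test at the top.  */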
501 /* Create or return the SRA_ELT structure for EXPR if the expression
502 refers to a scalarizable variable. */
504 static struct sra_elt *
505 maybe_lookup_element_for_expr (tree expr)
510 switch (TREE_CODE (expr))
515 if (is_sra_candidate_decl (expr))
516 return lookup_element (NULL, expr, TREE_TYPE (expr), INSERT);
520 /* We can't scalarize variable array indices. */
521 if (is_valid_const_index (expr))
522 child = TREE_OPERAND (expr, 1);
528 /* Don't look through unions. */
529 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) != RECORD_TYPE)
531 child = TREE_OPERAND (expr, 1);
535 child = integer_zero_node;
538 child = integer_one_node;
545 elt = maybe_lookup_element_for_expr (TREE_OPERAND (expr, 0));
547 return lookup_element (elt, child, TREE_TYPE (expr), INSERT);
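/* So for a reference such as "x.s.b" (names hypothetical), the
   recursion above bottoms out at the DECL for X, then creates or finds
   one sra_elt per level on the way back out: X, X.S, and X.S.B.  */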
552 /* Functions to walk just enough of the tree to see all scalarizable
553 references, and categorize them. */
555 /* A set of callbacks for phases 2 and 4. They'll be invoked for the
556 various kinds of references seen. In all cases, *BSI is an iterator
557 pointing to the statement being processed. */
560 /* Invoked when ELT is required as a unit. Note that ELT might refer to
561 a leaf node, in which case this is a simple scalar reference. *EXPR_P
562 points to the location of the expression. IS_OUTPUT is true if this
563 is a left-hand-side reference. */
564 void (*use) (struct sra_elt *elt, tree *expr_p,
565 block_stmt_iterator *bsi, bool is_output);
567 /* Invoked when we have a copy between two scalarizable references. */
568 void (*copy) (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
569 block_stmt_iterator *bsi);
571 /* Invoked when ELT is initialized from a constant. VALUE may be NULL,
572 in which case it should be treated as an empty CONSTRUCTOR. Return
573 false if we found a case we couldn't handle. */
574 bool (*init) (struct sra_elt *elt, tree value, block_stmt_iterator *bsi);
576 /* Invoked when we have a copy between one scalarizable reference ELT
577 and one non-scalarizable reference OTHER. IS_OUTPUT is true if ELT
578 is on the left-hand side. */
579 void (*ldst) (struct sra_elt *elt, tree other,
580 block_stmt_iterator *bsi, bool is_output);
582 /* True during phase 2, false during phase 4. */
583 /* ??? This is a hack. */
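/* A sketch of which callback fires for which statement shape, where A
   and B stand for scalarizable aggregates, X for a scalar, and P for
   some non-scalarizable reference (all hypothetical):

	A = B;		copy
	A = {0};	init
	A = P;		ldst, is_output true
	P = A;		ldst, is_output false
	X = A.f;	use on A.f
	foo (A);	use on A  */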
587 #ifdef ENABLE_CHECKING
588 /* Invoked via walk_tree, if *TP contains a candidate decl, return it. */
591 sra_find_candidate_decl (tree *tp, int *walk_subtrees,
592 void *data ATTRIBUTE_UNUSED)
595 enum tree_code code = TREE_CODE (t);
597 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
600 if (is_sra_candidate_decl (t))
610 /* Walk most expressions looking for a scalarizable aggregate.
611 If we find one, invoke FNS->USE. */
614 sra_walk_expr (tree *expr_p, block_stmt_iterator *bsi, bool is_output,
615 const struct sra_walk_fns *fns)
619 bool disable_scalarization = false;
621 /* We're looking to collect a reference expression between EXPR and INNER,
622 such that INNER is a scalarizable decl and all other nodes through EXPR
623 are references that we can scalarize. If we come across something that
624 we can't scalarize, we reset EXPR. This has the effect of making it
625 appear that we're referring to the larger expression as a whole. */
628 switch (TREE_CODE (inner))
633 /* If there is a scalarizable decl at the bottom, then process it. */
634 if (is_sra_candidate_decl (inner))
636 struct sra_elt *elt = maybe_lookup_element_for_expr (expr);
637 if (disable_scalarization)
638 elt->cannot_scalarize = true;
640 fns->use (elt, expr_p, bsi, is_output);
645 /* Non-constant index means any member may be accessed. Prevent the
646 expression from being scalarized. If we were to treat this as a
647 reference to the whole array, we can wind up with a single dynamic
648 index reference inside a loop being overridden by several constant
649 index references during loop setup. It's possible that this could
650 be avoided by using dynamic usage counts based on BB trip counts
651 (based on loop analysis or profiling), but that hardly seems worth the effort. */
653 /* ??? Hack. Figure out how to push this into the scan routines
654 without duplicating too much code. */
655 if (!is_valid_const_index (inner))
657 disable_scalarization = true;
660 /* ??? Are we assured that non-constant bounds and stride will have
661 the same value everywhere? I don't think Fortran will... */
662 if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
664 inner = TREE_OPERAND (inner, 0);
668 /* A reference to a union member constitutes a reference to the entire union. */
670 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (inner, 0))) != RECORD_TYPE)
672 /* ??? See above re non-constant stride. */
673 if (TREE_OPERAND (inner, 2))
675 inner = TREE_OPERAND (inner, 0);
680 inner = TREE_OPERAND (inner, 0);
684 /* A bit field reference (access to *multiple* fields simultaneously)
685 is not currently scalarized. Consider this an access to the
686 complete outer element, to which walk_tree will bring us next. */
689 case ARRAY_RANGE_REF:
690 /* Similarly, a subrange reference is used to modify indexing, which
691 means that the canonical element names that we have won't work. */
694 case VIEW_CONVERT_EXPR:
696 /* Similarly, a view/nop explicitly wants to look at an object in a
697 type other than the one we've scalarized. */
701 expr_p = &TREE_OPERAND (inner, 0);
702 inner = expr = *expr_p;
706 #ifdef ENABLE_CHECKING
707 /* Validate that we're not missing any references. */
708 if (walk_tree (&inner, sra_find_candidate_decl, NULL, NULL))
715 /* Walk a TREE_LIST of values looking for scalarizable aggregates.
716 If we find one, invoke FNS->USE. */
719 sra_walk_tree_list (tree list, block_stmt_iterator *bsi, bool is_output,
720 const struct sra_walk_fns *fns)
723 for (op = list; op ; op = TREE_CHAIN (op))
724 sra_walk_expr (&TREE_VALUE (op), bsi, is_output, fns);
727 /* Walk the arguments of a CALL_EXPR looking for scalarizable aggregates.
728 If we find one, invoke FNS->USE. */
731 sra_walk_call_expr (tree expr, block_stmt_iterator *bsi,
732 const struct sra_walk_fns *fns)
734 sra_walk_tree_list (TREE_OPERAND (expr, 1), bsi, false, fns);
737 /* Walk the inputs and outputs of an ASM_EXPR looking for scalarizable
738 aggregates. If we find one, invoke FNS->USE. */
741 sra_walk_asm_expr (tree expr, block_stmt_iterator *bsi,
742 const struct sra_walk_fns *fns)
744 sra_walk_tree_list (ASM_INPUTS (expr), bsi, false, fns);
745 sra_walk_tree_list (ASM_OUTPUTS (expr), bsi, true, fns);
748 /* Walk a MODIFY_EXPR and categorize the assignment appropriately. */
751 sra_walk_modify_expr (tree expr, block_stmt_iterator *bsi,
752 const struct sra_walk_fns *fns)
754 struct sra_elt *lhs_elt, *rhs_elt;
757 lhs = TREE_OPERAND (expr, 0);
758 rhs = TREE_OPERAND (expr, 1);
759 lhs_elt = maybe_lookup_element_for_expr (lhs);
760 rhs_elt = maybe_lookup_element_for_expr (rhs);
762 /* If both sides are scalarizable, this is a COPY operation. */
763 if (lhs_elt && rhs_elt)
765 fns->copy (lhs_elt, rhs_elt, bsi);
771 /* If this is an assignment from a constant, or constructor, then
772 we have access to all of the elements individually. Invoke INIT. */
773 if ((TREE_CODE (rhs) == COMPLEX_EXPR
774 || TREE_CODE (rhs) == COMPLEX_CST
775 || TREE_CODE (rhs) == CONSTRUCTOR)
776 && fns->init (lhs_elt, rhs, bsi))
779 /* If this is an assignment from read-only memory, treat this as if
780 we'd been passed the constructor directly. Invoke INIT. */
781 else if (TREE_CODE (rhs) == VAR_DECL
783 && TREE_READONLY (rhs)
784 && targetm.binds_local_p (rhs)
785 && DECL_INITIAL (rhs)
786 && fns->init (lhs_elt, DECL_INITIAL (rhs), bsi))
789 /* If this is a copy from a non-scalarizable lvalue, invoke LDST.
790 The lvalue requirement prevents us from trying to directly scalarize
791 the result of a function call, which would result in trying to call
792 the function multiple times, and other evil things. */
793 else if (!lhs_elt->is_scalar && is_gimple_addr_expr_arg (rhs))
794 fns->ldst (lhs_elt, rhs, bsi, true);
796 /* Otherwise we're being used in some context that requires the
797 aggregate to be seen as a whole. Invoke USE. */
799 fns->use (lhs_elt, &TREE_OPERAND (expr, 0), bsi, true);
803 /* LHS_ELT being null only means that the LHS as a whole is not a
804 scalarizable reference. There may be occurrences of scalarizable
805 variables within, which implies a USE. */
806 sra_walk_expr (&TREE_OPERAND (expr, 0), bsi, true, fns);
809 /* Likewise for the right-hand side. The only difference here is that
810 we don't have to handle constants, and the RHS may be a call. */
813 if (!rhs_elt->is_scalar)
814 fns->ldst (rhs_elt, lhs, bsi, false);
816 fns->use (rhs_elt, &TREE_OPERAND (expr, 1), bsi, false);
818 else if (TREE_CODE (rhs) == CALL_EXPR)
819 sra_walk_call_expr (rhs, bsi, fns);
821 sra_walk_expr (&TREE_OPERAND (expr, 1), bsi, false, fns);
824 /* Entry point to the walk functions. Search the entire function,
825 invoking the callbacks in FNS on each of the references to
826 scalarizable variables. */
829 sra_walk_function (const struct sra_walk_fns *fns)
832 block_stmt_iterator si, ni;
834 /* ??? Phase 4 could derive some benefit from walking the function in
835 dominator tree order. */
838 for (si = bsi_start (bb); !bsi_end_p (si); si = ni)
843 stmt = bsi_stmt (si);
844 ann = stmt_ann (stmt);
849 /* If the statement has no virtual operands, then it doesn't
850 make any structure references that we care about. */
851 if (NUM_V_MAY_DEFS (V_MAY_DEF_OPS (ann)) == 0
852 && NUM_VUSES (VUSE_OPS (ann)) == 0
853 && NUM_V_MUST_DEFS (V_MUST_DEF_OPS (ann)) == 0)
856 switch (TREE_CODE (stmt))
859 /* If we have "return <retval>" then the return value is
860 already exposed for our pleasure. Walk it as a USE to
861 force all the components back in place for the return.
863 If we have an embedded assignment, then <retval> is of
864 a type that gets returned in registers in this ABI, and
865 we do not wish to extend their lifetimes. Treat this
866 as a USE of the variable on the RHS of this assignment. */
868 t = TREE_OPERAND (stmt, 0);
869 if (TREE_CODE (t) == MODIFY_EXPR)
870 sra_walk_expr (&TREE_OPERAND (t, 1), &si, false, fns);
872 sra_walk_expr (&TREE_OPERAND (stmt, 0), &si, false, fns);
876 sra_walk_modify_expr (stmt, &si, fns);
879 sra_walk_call_expr (stmt, &si, fns);
882 sra_walk_asm_expr (stmt, &si, fns);
891 /* Phase One: Scan all referenced variables in the program looking for
892 structures that could be decomposed. */
895 find_candidates_for_sra (void)
898 bool any_set = false;
900 for (i = 0; i < num_referenced_vars; i++)
902 tree var = referenced_var (i);
903 if (decl_can_be_decomposed_p (var))
905 bitmap_set_bit (sra_candidates, var_ann (var)->uid);
914 /* Phase Two: Scan all references to scalarizable variables. Count the
915 number of times they are used or copied respectively. */
917 /* Callbacks to fill in SRA_WALK_FNS. Everything but USE is
918 considered a copy, because we can decompose the reference such that
919 the sub-elements needn't be contiguous. */
922 scan_use (struct sra_elt *elt, tree *expr_p ATTRIBUTE_UNUSED,
923 block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
924 bool is_output ATTRIBUTE_UNUSED)
930 scan_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
931 block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
933 lhs_elt->n_copies += 1;
934 rhs_elt->n_copies += 1;
938 scan_init (struct sra_elt *lhs_elt, tree rhs ATTRIBUTE_UNUSED,
939 block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
941 lhs_elt->n_copies += 1;
946 scan_ldst (struct sra_elt *elt, tree other ATTRIBUTE_UNUSED,
947 block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
948 bool is_output ATTRIBUTE_UNUSED)
953 /* Dump the values we collected during the scanning phase. */
956 scan_dump (struct sra_elt *elt)
960 dump_sra_elt_name (dump_file, elt);
961 fprintf (dump_file, ": n_uses=%u n_copies=%u\n", elt->n_uses, elt->n_copies);
963 for (c = elt->children; c ; c = c->sibling)
967 /* Entry point to phase 2. Scan the entire function, building up
968 scalarization data structures, recording copies and uses. */
973 static const struct sra_walk_fns fns = {
974 scan_use, scan_copy, scan_init, scan_ldst, true
977 sra_walk_function (&fns);
979 if (dump_file && (dump_flags & TDF_DETAILS))
983 fputs ("\nScan results:\n", dump_file);
984 EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i,
986 tree var = referenced_var (i);
987 struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
991 fputc ('\n', dump_file);
995 /* Phase Three: Make decisions about which variables to scalarize, if any.
996 All elements to be scalarized have replacement variables made for them. */
998 /* A subroutine of build_element_name. Recursively build the element
999 name on the obstack. */
1002 build_element_name_1 (struct sra_elt *elt)
1009 build_element_name_1 (elt->parent);
1010 obstack_1grow (&sra_obstack, '$');
1012 if (TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
1014 if (elt->element == integer_zero_node)
1015 obstack_grow (&sra_obstack, "real", 4);
1017 obstack_grow (&sra_obstack, "imag", 4);
1023 if (TREE_CODE (t) == INTEGER_CST)
1025 /* ??? Eh. Don't bother doing double-wide printing. */
1026 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (t));
1027 obstack_grow (&sra_obstack, buffer, strlen (buffer));
1031 tree name = DECL_NAME (t);
1033 obstack_grow (&sra_obstack, IDENTIFIER_POINTER (name),
1034 IDENTIFIER_LENGTH (name));
1037 sprintf (buffer, "D%u", DECL_UID (t));
1038 obstack_grow (&sra_obstack, buffer, strlen (buffer));
1043 /* Construct a pretty variable name for an element's replacement variable.
1044 The name is built on the obstack. */
1047 build_element_name (struct sra_elt *elt)
1049 build_element_name_1 (elt);
1050 obstack_1grow (&sra_obstack, '\0');
1051 return obstack_finish (&sra_obstack);
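/* E.g. (hypothetical elements): field B of member S of variable X is
   named "x$s$b", array element X.A[3] yields "x$a$3", and the real
   part of a complex X.C becomes "x$c$real".  */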
1054 /* Instantiate an element as an independent variable. */
1057 instantiate_element (struct sra_elt *elt)
1059 struct sra_elt *base_elt;
1062 for (base_elt = elt; base_elt->parent; base_elt = base_elt->parent)
1064 base = base_elt->element;
1066 elt->replacement = var = make_rename_temp (elt->type, "SR");
1067 DECL_SOURCE_LOCATION (var) = DECL_SOURCE_LOCATION (base);
1068 TREE_NO_WARNING (var) = TREE_NO_WARNING (base);
1069 DECL_ARTIFICIAL (var) = DECL_ARTIFICIAL (base);
1071 if (DECL_NAME (base) && !DECL_IGNORED_P (base))
1073 char *pretty_name = build_element_name (elt);
1074 DECL_NAME (var) = get_identifier (pretty_name);
1075 obstack_free (&sra_obstack, pretty_name);
1080 fputs (" ", dump_file);
1081 dump_sra_elt_name (dump_file, elt);
1082 fputs (" -> ", dump_file);
1083 print_generic_expr (dump_file, var, dump_flags);
1084 fputc ('\n', dump_file);
1088 /* Make one pass across an element tree deciding whether or not it's
1089 profitable to instantiate individual leaf scalars.
1091 PARENT_USES and PARENT_COPIES are the sum of the N_USES and N_COPIES
1092 fields all the way up the tree. */
1095 decide_instantiation_1 (struct sra_elt *elt, unsigned int parent_uses,
1096 unsigned int parent_copies)
1098 if (dump_file && !elt->parent)
1100 fputs ("Initial instantiation for ", dump_file);
1101 dump_sra_elt_name (dump_file, elt);
1102 fputc ('\n', dump_file);
1105 if (elt->cannot_scalarize)
1110 /* The decision is simple: instantiate if we're used more frequently
1111 than the parent needs to be seen as a complete unit. */
1112 if (elt->n_uses + elt->n_copies + parent_copies > parent_uses)
1113 instantiate_element (elt);
1118 unsigned int this_uses = elt->n_uses + parent_uses;
1119 unsigned int this_copies = elt->n_copies + parent_copies;
1121 for (c = elt->children; c ; c = c->sibling)
1122 decide_instantiation_1 (c, this_uses, this_copies);
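/* A worked instance of the heuristic above (counts invented): if A.B
   has n_uses == 3 and n_copies == 1, while A itself is used as a
   complete unit only twice (parent_uses == 2, parent_copies == 0),
   then 3 + 1 + 0 > 2 and A.B gets its own replacement variable.  */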
1126 /* Compute the size and number of all instantiated elements below ELT.
1127 We will only care about this if the size of the complete structure
1128 fits in a HOST_WIDE_INT, so we don't have to worry about overflow. */
1131 sum_instantiated_sizes (struct sra_elt *elt, unsigned HOST_WIDE_INT *sizep)
1133 if (elt->replacement)
1135 *sizep += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (elt->type));
1141 unsigned int count = 0;
1143 for (c = elt->children; c ; c = c->sibling)
1144 count += sum_instantiated_sizes (c, sizep);
1150 /* Instantiate fields in ELT->TYPE that are not currently present as children of ELT. */
1153 static void instantiate_missing_elements (struct sra_elt *elt);
1156 instantiate_missing_elements_1 (struct sra_elt *elt, tree child, tree type)
1158 struct sra_elt *sub = lookup_element (elt, child, type, INSERT);
1161 if (sub->replacement == NULL)
1162 instantiate_element (sub);
1165 instantiate_missing_elements (sub);
1169 instantiate_missing_elements (struct sra_elt *elt)
1171 tree type = elt->type;
1173 switch (TREE_CODE (type))
1178 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
1179 if (TREE_CODE (f) == FIELD_DECL)
1180 instantiate_missing_elements_1 (elt, f, TREE_TYPE (f));
1186 tree i, max, subtype;
1188 i = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1189 max = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
1190 subtype = TREE_TYPE (type);
1194 instantiate_missing_elements_1 (elt, i, subtype);
1195 if (tree_int_cst_equal (i, max))
1197 i = int_const_binop (PLUS_EXPR, i, integer_one_node, true);
1204 type = TREE_TYPE (type);
1205 instantiate_missing_elements_1 (elt, integer_zero_node, type);
1206 instantiate_missing_elements_1 (elt, integer_one_node, type);
1214 /* Make one pass across an element tree deciding whether to perform block
1215 or element copies. If we decide on element copies, instantiate all
1216 elements. Return true if there are any instantiated sub-elements. */
1219 decide_block_copy (struct sra_elt *elt)
1224 /* If scalarization is disabled, respect it. */
1225 if (elt->cannot_scalarize)
1227 elt->use_block_copy = 1;
1231 fputs ("Scalarization disabled for ", dump_file);
1232 dump_sra_elt_name (dump_file, elt);
1233 fputc ('\n', dump_file);
1239 /* Don't decide if we've no uses. */
1240 if (elt->n_uses == 0 && elt->n_copies == 0)
1243 else if (!elt->is_scalar)
1245 tree size_tree = TYPE_SIZE_UNIT (elt->type);
1246 bool use_block_copy = true;
1248 /* Don't bother trying to figure out the rest if the structure is
1249 so large we can't do easy arithmetic. This also forces block
1250 copies for variable sized structures. */
1251 if (host_integerp (size_tree, 1))
1253 unsigned HOST_WIDE_INT full_size, inst_size = 0;
1254 unsigned int inst_count;
1256 full_size = tree_low_cst (size_tree, 1);
1258 /* ??? What to do here. If there are two fields, and we've only
1259 instantiated one, then instantiating the other is clearly a win.
1260 If there are a large number of fields then the size of the copy
1261 is much more of a factor. */
1263 /* If the structure is small, and we've made copies, go ahead
1264 and instantiate, hoping that the copies will go away. */
1265 if (full_size <= (unsigned) MOVE_RATIO * UNITS_PER_WORD
1266 && elt->n_copies > elt->n_uses)
1267 use_block_copy = false;
1270 inst_count = sum_instantiated_sizes (elt, &inst_size);
1272 if (inst_size * 4 >= full_size * 3)
1273 use_block_copy = false;
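	  /* Put differently (a summary of the two tests above, not
	     additional logic): small, copy-heavy structures are always
	     broken up, while a larger one is broken up only if at least
	     three quarters of its bytes are already covered by
	     instantiated elements.  */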
1276 /* In order to avoid block copy, we have to be able to instantiate
1277 all elements of the type. See if this is possible. */
1279 && (!can_completely_scalarize_p (elt)
1280 || !type_can_instantiate_all_elements (elt->type)))
1281 use_block_copy = true;
1283 elt->use_block_copy = use_block_copy;
1287 fprintf (dump_file, "Using %s for ",
1288 use_block_copy ? "block-copy" : "element-copy");
1289 dump_sra_elt_name (dump_file, elt);
1290 fputc ('\n', dump_file);
1293 if (!use_block_copy)
1295 instantiate_missing_elements (elt);
1300 any_inst = elt->replacement != NULL;
1302 for (c = elt->children; c ; c = c->sibling)
1303 any_inst |= decide_block_copy (c);
1308 /* Entry point to phase 3. Instantiate scalar replacement variables. */
1311 decide_instantiations (void)
1315 struct bitmap_head_def done_head;
1317 /* We cannot clear bits from a bitmap we're iterating over,
1318 so save up all the bits to clear until the end. */
1319 bitmap_initialize (&done_head, 1);
1320 cleared_any = false;
1322 EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i,
1324 tree var = referenced_var (i);
1325 struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
1328 decide_instantiation_1 (elt, 0, 0);
1329 if (!decide_block_copy (elt))
1334 bitmap_set_bit (&done_head, i);
1341 bitmap_operation (sra_candidates, sra_candidates, &done_head,
1343 bitmap_operation (needs_copy_in, needs_copy_in, &done_head,
1346 bitmap_clear (&done_head);
1349 fputc ('\n', dump_file);
1353 /* Phase Four: Update the function to match the replacements created. */
1355 /* Mark all the variables in V_MAY_DEF or V_MUST_DEF operands for STMT for
1356 renaming. This becomes necessary when we modify all of a non-scalar. */
1359 mark_all_v_defs (tree stmt)
1361 v_may_def_optype v_may_defs;
1362 v_must_def_optype v_must_defs;
1365 get_stmt_operands (stmt);
1367 v_may_defs = V_MAY_DEF_OPS (stmt_ann (stmt));
1368 n = NUM_V_MAY_DEFS (v_may_defs);
1369 for (i = 0; i < n; i++)
1371 tree sym = V_MAY_DEF_RESULT (v_may_defs, i);
1372 if (TREE_CODE (sym) == SSA_NAME)
1373 sym = SSA_NAME_VAR (sym);
1374 bitmap_set_bit (vars_to_rename, var_ann (sym)->uid);
1377 v_must_defs = V_MUST_DEF_OPS (stmt_ann (stmt));
1378 n = NUM_V_MUST_DEFS (v_must_defs);
1379 for (i = 0; i < n; i++)
1381 tree sym = V_MUST_DEF_OP (v_must_defs, i);
1382 if (TREE_CODE (sym) == SSA_NAME)
1383 sym = SSA_NAME_VAR (sym);
1384 bitmap_set_bit (vars_to_rename, var_ann (sym)->uid);
1388 /* Build a single level component reference to ELT rooted at BASE. */
1391 generate_one_element_ref (struct sra_elt *elt, tree base)
1393 switch (TREE_CODE (TREE_TYPE (base)))
1396 return build (COMPONENT_REF, elt->type, base, elt->element, NULL);
1399 return build (ARRAY_REF, elt->type, base, elt->element, NULL, NULL);
1402 if (elt->element == integer_zero_node)
1403 return build (REALPART_EXPR, elt->type, base);
1405 return build (IMAGPART_EXPR, elt->type, base);
1412 /* Build a full component reference to ELT rooted at its native variable. */
1415 generate_element_ref (struct sra_elt *elt)
1418 return generate_one_element_ref (elt, generate_element_ref (elt->parent));
1420 return elt->element;
1423 /* Generate a set of assignment statements in *LIST_P to copy all
1424 instantiated elements under ELT to or from the equivalent structure
1425 rooted at EXPR. COPY_OUT controls the direction of the copy, with
1426 true meaning to copy out of EXPR into ELT. */
1429 generate_copy_inout (struct sra_elt *elt, bool copy_out, tree expr,
1435 if (elt->replacement)
1438 t = build (MODIFY_EXPR, void_type_node, elt->replacement, expr);
1440 t = build (MODIFY_EXPR, void_type_node, expr, elt->replacement);
1441 append_to_statement_list (t, list_p);
1445 for (c = elt->children; c ; c = c->sibling)
1447 t = generate_one_element_ref (c, unshare_expr (expr));
1448 generate_copy_inout (c, copy_out, t, list_p);
1453 /* Generate a set of assignment statements in *LIST_P to copy all instantiated
1454 elements under SRC to their counterparts under DST. There must be a 1-1
1455 correspondence of instantiated elements. */
1458 generate_element_copy (struct sra_elt *dst, struct sra_elt *src, tree *list_p)
1460 struct sra_elt *dc, *sc;
1462 for (dc = dst->children; dc ; dc = dc->sibling)
1464 sc = lookup_element (src, dc->element, NULL, NO_INSERT);
1467 generate_element_copy (dc, sc, list_p);
1470 if (dst->replacement)
1474 if (src->replacement == NULL)
1477 t = build (MODIFY_EXPR, void_type_node, dst->replacement,
1479 append_to_statement_list (t, list_p);
1483 /* Generate a set of assignment statements in *LIST_P to zero all instantiated
1484 elements under ELT. In addition, do not assign to elements that have been
1485 marked VISITED but do reset the visited flag; this allows easy coordination
1486 with generate_element_init. */
1489 generate_element_zero (struct sra_elt *elt, tree *list_p)
1493 for (c = elt->children; c ; c = c->sibling)
1494 generate_element_zero (c, list_p);
1497 elt->visited = false;
1498 else if (elt->replacement)
1503 t = fold_convert (elt->type, integer_zero_node);
1505 /* We generated a replacement for a non-scalar? */
1508 t = build (MODIFY_EXPR, void_type_node, elt->replacement, t);
1509 append_to_statement_list (t, list_p);
1513 /* Generate a set of assignment statements in *LIST_P to set all instantiated
1514 elements under ELT with the contents of the initializer INIT. In addition,
1515 mark all assigned elements VISITED; this allows easy coordination with
1516 generate_element_zero. Return false if we found a case we couldn't handle. */
1520 generate_element_init (struct sra_elt *elt, tree init, tree *list_p)
1523 enum tree_code init_code;
1524 struct sra_elt *sub;
1527 /* We can be passed DECL_INITIAL of a static variable. It might have a
1528 conversion, which we strip off here. */
1529 STRIP_USELESS_TYPE_CONVERSION (init);
1530 init_code = TREE_CODE (init);
1534 if (elt->replacement)
1536 t = build (MODIFY_EXPR, void_type_node, elt->replacement, init);
1537 append_to_statement_list (t, list_p);
1538 elt->visited = true;
1547 for (sub = elt->children; sub ; sub = sub->sibling)
1549 if (sub->element == integer_zero_node)
1550 t = (init_code == COMPLEX_EXPR
1551 ? TREE_OPERAND (init, 0) : TREE_REALPART (init));
1553 t = (init_code == COMPLEX_EXPR
1554 ? TREE_OPERAND (init, 1) : TREE_IMAGPART (init));
1555 result &= generate_element_init (sub, t, list_p);
1560 for (t = CONSTRUCTOR_ELTS (init); t ; t = TREE_CHAIN (t))
1562 sub = lookup_element (elt, TREE_PURPOSE (t), NULL, NO_INSERT);
1565 result &= generate_element_init (sub, TREE_VALUE (t), list_p);
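/* For example (hypothetical): initializing "struct { int a, b; } x"
   from the constructor { .a = 1 } emits "x$a = 1" here and marks that
   element visited; generate_element_zero later supplies "x$b = 0" for
   the member the constructor left implicit.  */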
1576 /* Insert STMT on all the outgoing edges out of BB. Note that if BB
1577 has more than one edge, STMT will be replicated for each edge. Also,
1578 abnormal edges will be ignored. */
1581 insert_edge_copies (tree stmt, basic_block bb)
1587 for (e = bb->succ; e; e = e->succ_next)
1589 /* We don't need to insert copies on abnormal edges. The
1590 value of the scalar replacement is not guaranteed to
1591 be valid through an abnormal edge. */
1592 if (!(e->flags & EDGE_ABNORMAL))
1596 bsi_insert_on_edge (e, stmt);
1600 bsi_insert_on_edge (e, lhd_unsave_expr_now (stmt));
1605 /* Helper function to insert LIST before BSI, and set up line number info. */
1608 sra_insert_before (block_stmt_iterator *bsi, tree list)
1610 tree stmt = bsi_stmt (*bsi);
1612 if (EXPR_HAS_LOCATION (stmt))
1613 annotate_all_with_locus (&list, EXPR_LOCATION (stmt));
1614 bsi_insert_before (bsi, list, BSI_SAME_STMT);
1617 /* Similarly, but insert after BSI. Handles insertion onto edges as well. */
1620 sra_insert_after (block_stmt_iterator *bsi, tree list)
1622 tree stmt = bsi_stmt (*bsi);
1624 if (EXPR_HAS_LOCATION (stmt))
1625 annotate_all_with_locus (&list, EXPR_LOCATION (stmt));
1627 if (stmt_ends_bb_p (stmt))
1628 insert_edge_copies (list, bsi->bb);
1630 bsi_insert_after (bsi, list, BSI_SAME_STMT);
1633 /* Similarly, but replace the statement at BSI. */
1636 sra_replace (block_stmt_iterator *bsi, tree list)
1638 sra_insert_before (bsi, list);
1640 if (bsi_end_p (*bsi))
1641 *bsi = bsi_last (bsi->bb);
1646 /* Scalarize a USE. To recap, this is either a simple reference to ELT,
1647 if elt is scalar, or some occurrence of ELT that requires a complete
1648 aggregate. IS_OUTPUT is true if ELT is being modified. */
1651 scalarize_use (struct sra_elt *elt, tree *expr_p, block_stmt_iterator *bsi,
1654 tree list = NULL, stmt = bsi_stmt (*bsi);
1656 if (elt->replacement)
1658 /* If we have a replacement, then updating the reference is as
1659 simple as modifying the existing statement in place. */
1661 mark_all_v_defs (stmt);
1662 *expr_p = elt->replacement;
1667 /* Otherwise we need some copies. If ELT is being read, then we want
1668 to store all (modified) sub-elements back into the structure before
1669 the reference takes place. If ELT is being written, then we want to
1670 load the changed values back into our shadow variables. */
1671 /* ??? We don't check modified for reads, we just always write all of
1672 the values. We should be able to record the SSA number of the VOP
1673 for which the values were last read. If that number matches the
1674 SSA number of the VOP in the current statement, then we needn't
1675 emit an assignment. This would also eliminate double writes when
1676 a structure is passed as more than one argument to a function call.
1677 This optimization would be most effective if sra_walk_function
1678 processed the blocks in dominator order. */
1680 generate_copy_inout (elt, is_output, generate_element_ref (elt), &list);
1685 mark_all_v_defs (expr_first (list));
1686 sra_insert_after (bsi, list);
1689 sra_insert_before (bsi, list);
1693 /* Scalarize a COPY. To recap, this is an assignment statement between
1694 two scalarizable references, LHS_ELT and RHS_ELT. */
1697 scalarize_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
1698 block_stmt_iterator *bsi)
1702 if (lhs_elt->replacement && rhs_elt->replacement)
1704 /* If we have two scalar operands, modify the existing statement. */
1705 stmt = bsi_stmt (*bsi);
1707 #ifdef ENABLE_CHECKING
1708 /* See the commentary in sra_walk_function concerning
1709 RETURN_EXPR, and why we should never see one here. */
1710 if (TREE_CODE (stmt) != MODIFY_EXPR)
1714 TREE_OPERAND (stmt, 0) = lhs_elt->replacement;
1715 TREE_OPERAND (stmt, 1) = rhs_elt->replacement;
1718 else if (lhs_elt->use_block_copy || rhs_elt->use_block_copy)
1720 /* If either side requires a block copy, then sync the RHS back
1721 to the original structure, leave the original assignment
1722 statement (which will perform the block copy), then load the
1723 LHS values out of its now-updated original structure. */
1724 /* ??? Could perform a modified pair-wise element copy. That
1725 would at least allow those elements that are instantiated in
1726 both structures to be optimized well. */
1729 generate_copy_inout (rhs_elt, false,
1730 generate_element_ref (rhs_elt), &list);
1733 mark_all_v_defs (expr_first (list));
1734 sra_insert_before (bsi, list);
1738 generate_copy_inout (lhs_elt, true,
1739 generate_element_ref (lhs_elt), &list);
1741 sra_insert_after (bsi, list);
1745 /* Otherwise both sides must be fully instantiated. In which
1746 case perform pair-wise element assignments and replace the
1747 original block copy statement. */
1749 stmt = bsi_stmt (*bsi);
1750 mark_all_v_defs (stmt);
1753 generate_element_copy (lhs_elt, rhs_elt, &list);
1756 sra_replace (bsi, list);
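/* Concretely (names hypothetical): for "a = b" with both sides fully
   instantiated over fields X and Y, the block copy statement is
   replaced by the pair of assignments "a$x = b$x; a$y = b$y".  */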
1760 /* Scalarize an INIT. To recap, this is an assignment to a scalarizable
1761 reference from some form of constructor: CONSTRUCTOR, COMPLEX_CST or
1762 COMPLEX_EXPR. If RHS is NULL, it should be treated as an empty
1763 CONSTRUCTOR. Return false if we didn't handle this case. */
1766 scalarize_init (struct sra_elt *lhs_elt, tree rhs, block_stmt_iterator *bsi)
1771 /* Generate initialization statements for all members extant in the RHS. */
1773 result = generate_element_init (lhs_elt, rhs, &list);
1775 /* CONSTRUCTOR is defined such that any member not mentioned is assigned
1776 a zero value. Initialize the rest of the instantiated elements. */
1777 generate_element_zero (lhs_elt, &list);
1779 /* If we didn't generate anything or couldn't handle this case return.
1780 Say which it was. */
1781 if (!result || list == NULL)
1784 if (lhs_elt->use_block_copy)
1786 /* Since LHS is not fully instantiated, we must leave the structure
1787 assignment in place. Treating this case differently from a USE
1788 exposes constants to later optimizations. */
1789 mark_all_v_defs (expr_first (list));
1790 sra_insert_after (bsi, list);
1794 /* The LHS is fully instantiated. The list of initializations
1795 replaces the original structure assignment. */
1796 mark_all_v_defs (bsi_stmt (*bsi));
1797 sra_replace (bsi, list);
1803 /* A subroutine of scalarize_ldst called via walk_tree. Set TREE_THIS_NOTRAP
1804 on all INDIRECT_REFs. */
1807 mark_notrap (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1811 if (TREE_CODE (t) == INDIRECT_REF)
1813 TREE_THIS_NOTRAP (t) = 1;
1816 else if (DECL_P (t) || TYPE_P (t))
1822 /* Scalarize a LDST. To recap, this is an assignment between one scalarizable
1823 reference ELT and one non-scalarizable reference OTHER. IS_OUTPUT is true
1824 if ELT is on the left-hand side. */
1827 scalarize_ldst (struct sra_elt *elt, tree other,
1828 block_stmt_iterator *bsi, bool is_output)
1830 /* Shouldn't have gotten called for a scalar. */
1831 if (elt->replacement)
1834 if (elt->use_block_copy)
1836 /* Since ELT is not fully instantiated, we have to leave the
1837 block copy in place. Treat this as a USE. */
1838 scalarize_use (elt, NULL, bsi, is_output);
1842 /* The interesting case is when ELT is fully instantiated. In this
1843 case we can have each element stored/loaded directly to/from the
1844 corresponding slot in OTHER. This avoids a block copy. */
1846 tree list = NULL, stmt = bsi_stmt (*bsi);
1848 mark_all_v_defs (stmt);
1849 generate_copy_inout (elt, is_output, other, &list);
1853 /* Preserve EH semantics. */
1854 if (stmt_ends_bb_p (stmt))
1856 tree_stmt_iterator tsi;
1859 /* Extract the first statement from LIST. */
1860 tsi = tsi_start (list);
1861 first = tsi_stmt (tsi);
1864 /* Replace the old statement with this new representative. */
1865 bsi_replace (bsi, first, true);
1867 if (!tsi_end_p (tsi))
1869 /* If any reference would trap, then they all would. And more
1870 to the point, the first would. Therefore none of the rest
1871 will trap since the first didn't. Indicate this by
1872 iterating over the remaining statements and set
1873 TREE_THIS_NOTRAP in all INDIRECT_REFs. */
1876 walk_tree (tsi_stmt_ptr (tsi), mark_notrap, NULL, NULL);
1879 while (!tsi_end_p (tsi));
1881 insert_edge_copies (list, bsi->bb);
1885 sra_replace (bsi, list);
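/* E.g. (hypothetical): for "*p = a" with A fully instantiated over
   fields X and Y, the single block store becomes "(*p).x = a$x;
   (*p).y = a$y"; only the first store can trap, so the remainder are
   marked TREE_THIS_NOTRAP above.  */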
1889 /* Generate initializations for all scalarizable parameters. */
1892 scalarize_parms (void)
1897 EXECUTE_IF_SET_IN_BITMAP (needs_copy_in, 0, i,
1899 tree var = referenced_var (i);
1900 struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
1901 generate_copy_inout (elt, true, var, &list);
1905 insert_edge_copies (list, ENTRY_BLOCK_PTR);
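/* That is, for each scalarizable parameter P recorded in
   NEEDS_COPY_IN, copy-in assignments such as "p$x = p.x" (names
   hypothetical) land on the edge out of ENTRY_BLOCK_PTR, so the
   replacement variables begin life holding the incoming argument
   values.  */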
1908 /* Entry point to phase 4. Update the function to match replacements. */
1911 scalarize_function (void)
1913 static const struct sra_walk_fns fns = {
1914 scalarize_use, scalarize_copy, scalarize_init, scalarize_ldst, false
1917 sra_walk_function (&fns);
1919 bsi_commit_edge_inserts (NULL);
1923 /* Debug helper function. Print ELT in a nice human-readable format. */
1926 dump_sra_elt_name (FILE *f, struct sra_elt *elt)
1928 if (elt->parent && TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
1930 fputs (elt->element == integer_zero_node ? "__real__ " : "__imag__ ", f);
1931 dump_sra_elt_name (f, elt->parent);
1936 dump_sra_elt_name (f, elt->parent);
1937 if (DECL_P (elt->element))
1939 if (TREE_CODE (elt->element) == FIELD_DECL)
1941 print_generic_expr (f, elt->element, dump_flags);
1944 fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC "]",
1945 TREE_INT_CST_LOW (elt->element));
1949 /* Likewise, but callable from the debugger. */
1952 debug_sra_elt_name (struct sra_elt *elt)
1954 dump_sra_elt_name (stderr, elt);
1955 fputc ('\n', stderr);
1958 /* Main entry point. */
1963 /* Initialize local variables. */
1964 gcc_obstack_init (&sra_obstack);
1965 sra_candidates = BITMAP_XMALLOC ();
1966 needs_copy_in = BITMAP_XMALLOC ();
1967 sra_type_decomp_cache = BITMAP_XMALLOC ();
1968 sra_type_inst_cache = BITMAP_XMALLOC ();
1969 sra_map = htab_create (101, sra_elt_hash, sra_elt_eq, NULL);
1971 /* Scan. If we find anything, instantiate and scalarize. */
1972 if (find_candidates_for_sra ())
1975 decide_instantiations ();
1976 scalarize_function ();
1979 /* Free allocated memory. */
1980 htab_delete (sra_map);
1982 BITMAP_XFREE (sra_candidates);
1983 BITMAP_XFREE (needs_copy_in);
1984 BITMAP_XFREE (sra_type_decomp_cache);
1985 BITMAP_XFREE (sra_type_inst_cache);
1986 obstack_free (&sra_obstack, NULL);
1992 return flag_tree_sra != 0;
1995 struct tree_opt_pass pass_sra =
1998 gate_sra, /* gate */
1999 tree_sra, /* execute */
2002 0, /* static_pass_number */
2003 TV_TREE_SRA, /* tv_id */
2004 PROP_cfg | PROP_ssa, /* properties_required */
2005 0, /* properties_provided */
2006 0, /* properties_destroyed */
2007 0, /* todo_flags_start */
2008 TODO_dump_func | TODO_rename_vars
2009 | TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */