/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2003, 2004 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"

/* These RTL headers are needed for basic-block.h.  */
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "langhooks.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "tree-gimple.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "flags.h"
#include "bitmap.h"
#include "obstack.h"
#include "target.h"
/* expr.h is needed for MOVE_RATIO.  */
#include "expr.h"
#include "params.h"

/* The object of this pass is to replace a non-addressable aggregate with a
   set of independent variables.  Most of the time, all of these variables
   will be scalars.  But a secondary objective is to break up larger
   aggregates into smaller aggregates.  In the process we may find that some
   bits of the larger aggregate can be deleted as unreferenced.

   This substitution is done globally.  More localized substitutions would
   be the purview of a load-store motion pass.

   The optimization proceeds in phases:

     (1) Identify variables that have types that are candidates for
	 decomposition.

     (2) Scan the function looking for the ways these variables are used.
	 In particular we're interested in the number of times a variable
	 (or member) is needed as a complete unit, and the number of times
	 a variable (or member) is copied.

     (3) Based on the usage profile, instantiate substitution variables.

     (4) Scan the function making replacements.  */

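/* As an illustration only (a hypothetical input fragment, not taken from
   any particular testcase; the "p$x" style names merely follow the naming
   scheme produced by build_element_name below), the pass transforms

	struct point { int x; int y; } p;
	p.x = 3;
	p.y = 4;
	return p.x + p.y;

   into the equivalent of

	int p$x = 3;
	int p$y = 4;
	return p$x + p$y;

   at which point the aggregate P itself is unreferenced and can be
   deleted by later passes.  */
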
/* The set of aggregate variables that are candidates for scalarization.  */
static bitmap sra_candidates;

/* Set of scalarizable PARM_DECLs that need copy-in operations at the
   beginning of the function.  */
static bitmap needs_copy_in;

/* Sets of bit pairs that cache type decomposition and instantiation.  */
static bitmap sra_type_decomp_cache;
static bitmap sra_type_inst_cache;

/* One of these structures is created for each candidate aggregate
   and each (accessed) member of such an aggregate.  */
struct sra_elt
{
  /* A tree of the elements.  Used when we want to traverse everything.  */
  struct sra_elt *parent;
  struct sra_elt *children;
  struct sra_elt *sibling;

  /* If this element is a root, then this is the VAR_DECL.  If this is
     a sub-element, this is some token used to identify the reference.
     In the case of COMPONENT_REF, this is the FIELD_DECL.  In the case
     of an ARRAY_REF, this is the (constant) index.  In the case of a
     complex number, this is a zero or one.  */
  tree element;

  /* The type of the element.  */
  tree type;

  /* A VAR_DECL, for any sub-element we've decided to replace.  */
  tree replacement;

  /* The number of times the element is referenced as a whole.  I.e.
     given "a.b.c", this would be incremented for C, but not for A or B.  */
  unsigned int n_uses;

  /* The number of times the element is copied to or from another
     scalarizable element.  */
  unsigned int n_copies;

  /* True if TYPE is scalar.  */
  bool is_scalar;

  /* True if we saw something about this element that prevents scalarization,
     such as non-constant indexing.  */
  bool cannot_scalarize;

  /* True if we've decided that structure-to-structure assignment
     should happen via memcpy and not per-element.  */
  bool use_block_copy;

  /* A flag for use with/after random access traversals.  */
  bool visited;
};

/* Random access to the child of a parent is performed by hashing.
   This prevents quadratic behavior, and allows SRA to function
   reasonably on larger records.  */
static htab_t sra_map;

/* All structures are allocated out of the following obstack.  */
static struct obstack sra_obstack;

/* Debugging functions.  */
static void dump_sra_elt_name (FILE *, struct sra_elt *);
extern void debug_sra_elt_name (struct sra_elt *);

/* Return true if DECL is an SRA candidate.  */

static bool
is_sra_candidate_decl (tree decl)
{
  return DECL_P (decl) && bitmap_bit_p (sra_candidates, var_ann (decl)->uid);
}

/* Return true if TYPE is a scalar type.  */

static bool
is_sra_scalar_type (tree type)
{
  enum tree_code code = TREE_CODE (type);
  return (code == INTEGER_TYPE || code == REAL_TYPE || code == VECTOR_TYPE
	  || code == ENUMERAL_TYPE || code == BOOLEAN_TYPE
	  || code == CHAR_TYPE || code == POINTER_TYPE || code == OFFSET_TYPE
	  || code == REFERENCE_TYPE);
}

/* Return true if TYPE can be decomposed into a set of independent variables.

   Note that this doesn't imply that all elements of TYPE can be
   instantiated, just that if we decide to break up the type into
   separate pieces that it can be done.  */

static bool
type_can_be_decomposed_p (tree type)
{
  unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
  tree t;

  /* Avoid searching the same type twice.  */
  if (bitmap_bit_p (sra_type_decomp_cache, cache+0))
    return true;
  if (bitmap_bit_p (sra_type_decomp_cache, cache+1))
    return false;

  /* The type must have a definite nonzero size.  */
  if (TYPE_SIZE (type) == NULL || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
      || integer_zerop (TYPE_SIZE (type)))
    goto fail;

  /* The type must be a non-union aggregate.  */
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	bool saw_one_field = false;

	for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
	  if (TREE_CODE (t) == FIELD_DECL)
	    {
	      /* Reject incorrectly represented bit fields.  */
	      if (DECL_BIT_FIELD (t)
		  && (tree_low_cst (DECL_SIZE (t), 1)
		      != TYPE_PRECISION (TREE_TYPE (t))))
		goto fail;

	      saw_one_field = true;
	    }

	/* Record types must have at least one field.  */
	if (!saw_one_field)
	  goto fail;
      }
      break;

    case ARRAY_TYPE:
      /* Array types must have a fixed lower and upper bound.  */
      t = TYPE_DOMAIN (type);
      if (t == NULL)
	goto fail;
      if (TYPE_MIN_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MIN_VALUE (t)))
	goto fail;
      if (TYPE_MAX_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MAX_VALUE (t)))
	goto fail;
      break;

    case COMPLEX_TYPE:
      break;

    default:
      goto fail;
    }

  bitmap_set_bit (sra_type_decomp_cache, cache+0);
  return true;

 fail:
  bitmap_set_bit (sra_type_decomp_cache, cache+1);
  return false;
}

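/* For example (hypothetical types, purely to illustrate the checks above):

	struct ok { int i; float f; };	decomposable
	union u   { int i; float f; };	rejected: not a non-union aggregate
	struct bf { int i : 3; };	rejected if the field's DECL_SIZE
					(here 3 bits) differs from the
					TYPE_PRECISION of the field's type
	int vla[n];			rejected: no constant array bounds  */
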
/* Return true if DECL can be decomposed into a set of independent
   (though not necessarily scalar) variables.  */

static bool
decl_can_be_decomposed_p (tree var)
{
  /* Early out for scalars.  */
  if (is_sra_scalar_type (TREE_TYPE (var)))
    return false;

  /* The variable must not be aliased.  */
  if (!is_gimple_non_addressable (var))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because it must live in memory\n");
	}
      return false;
    }

  /* The variable must not be volatile.  */
  if (TREE_THIS_VOLATILE (var))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because it is declared volatile\n");
	}
      return false;
    }

  /* We must be able to decompose the variable's type.  */
  if (!type_can_be_decomposed_p (TREE_TYPE (var)))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because its type cannot be decomposed\n");
	}
      return false;
    }

  return true;
}

/* Return true if TYPE can be *completely* decomposed into scalars.  */

static bool
type_can_instantiate_all_elements (tree type)
{
  if (is_sra_scalar_type (type))
    return true;
  if (!type_can_be_decomposed_p (type))
    return false;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
	tree f;

	if (bitmap_bit_p (sra_type_inst_cache, cache+0))
	  return true;
	if (bitmap_bit_p (sra_type_inst_cache, cache+1))
	  return false;

	for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      if (!type_can_instantiate_all_elements (TREE_TYPE (f)))
		{
		  bitmap_set_bit (sra_type_inst_cache, cache+1);
		  return false;
		}
	    }

	bitmap_set_bit (sra_type_inst_cache, cache+0);
	return true;
      }

    case ARRAY_TYPE:
      return type_can_instantiate_all_elements (TREE_TYPE (type));

    case COMPLEX_TYPE:
      return true;

    default:
      gcc_unreachable ();
    }
}

/* Test whether ELT or some sub-element cannot be scalarized.  */

static bool
can_completely_scalarize_p (struct sra_elt *elt)
{
  struct sra_elt *c;

  if (elt->cannot_scalarize)
    return false;

  for (c = elt->children; c ; c = c->sibling)
    if (!can_completely_scalarize_p (c))
      return false;

  return true;
}

/* A simplified tree hashing algorithm that only handles the types of
   trees we expect to find in sra_elt->element.  */

static hashval_t
sra_hash_tree (tree t)
{
  hashval_t h;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      h = DECL_UID (t);
      break;

    case INTEGER_CST:
      h = TREE_INT_CST_LOW (t) ^ TREE_INT_CST_HIGH (t);
      break;

    case FIELD_DECL:
      /* We can have types that are compatible, but have different member
	 lists, so we can't hash fields by ID.  Use offsets instead.  */
      h = iterative_hash_expr (DECL_FIELD_OFFSET (t), 0);
      h = iterative_hash_expr (DECL_FIELD_BIT_OFFSET (t), h);
      break;

    default:
      gcc_unreachable ();
    }

  return h;
}

/* Hash function for type SRA_PAIR.  */

static hashval_t
sra_elt_hash (const void *x)
{
  const struct sra_elt *e = x;
  const struct sra_elt *p;
  hashval_t h;

  h = sra_hash_tree (e->element);

  /* Take into account everything back up the chain.  Given that chain
     lengths are rarely very long, this should be acceptable.  If we
     truly identify this as a performance problem, it should work to
     hash the pointer value "e->parent".  */
  for (p = e->parent; p ; p = p->parent)
    h = (h * 65521) ^ sra_hash_tree (p->element);

  return h;
}

/* Equality function for type SRA_PAIR.  */

static int
sra_elt_eq (const void *x, const void *y)
{
  const struct sra_elt *a = x;
  const struct sra_elt *b = y;
  tree ae, be;

  if (a->parent != b->parent)
    return false;

  ae = a->element;
  be = b->element;

  if (ae == be)
    return true;
  if (TREE_CODE (ae) != TREE_CODE (be))
    return false;

  switch (TREE_CODE (ae))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* These are all pointer unique.  */
      return false;

    case INTEGER_CST:
      /* Integers are not pointer unique, so compare their values.  */
      return tree_int_cst_equal (ae, be);

    case FIELD_DECL:
      /* Fields are unique within a record, but not between
	 compatible records.  */
      if (DECL_FIELD_CONTEXT (ae) == DECL_FIELD_CONTEXT (be))
	return false;
      return fields_compatible_p (ae, be);

    default:
      gcc_unreachable ();
    }
}

/* Create or return the SRA_ELT structure for CHILD in PARENT.  PARENT
   may be null, in which case CHILD must be a DECL.  */

static struct sra_elt *
lookup_element (struct sra_elt *parent, tree child, tree type,
		enum insert_option insert)
{
  struct sra_elt dummy;
  struct sra_elt **slot;
  struct sra_elt *elt;

  dummy.parent = parent;
  dummy.element = child;

  slot = (struct sra_elt **) htab_find_slot (sra_map, &dummy, insert);
  if (!slot && insert == NO_INSERT)
    return NULL;

  elt = *slot;
  if (!elt && insert == INSERT)
    {
      *slot = elt = obstack_alloc (&sra_obstack, sizeof (*elt));
      memset (elt, 0, sizeof (*elt));

      elt->parent = parent;
      elt->element = child;
      elt->type = type;
      elt->is_scalar = is_sra_scalar_type (type);

      if (parent)
	{
	  elt->sibling = parent->children;
	  parent->children = elt;
	}

      /* If this is a parameter, then if we want to scalarize, we have
	 one copy from the true function parameter.  Count it now.  */
      if (TREE_CODE (child) == PARM_DECL)
	{
	  elt->n_copies = 1;
	  bitmap_set_bit (needs_copy_in, var_ann (child)->uid);
	}
    }

  return elt;
}

/* Return true if the ARRAY_REF in EXPR is a constant, in-bounds access.  */

static bool
is_valid_const_index (tree expr)
{
  tree dom, t, index = TREE_OPERAND (expr, 1);

  if (TREE_CODE (index) != INTEGER_CST)
    return false;

  /* Watch out for stupid user tricks, indexing outside the array.

     Careful, we're not called only on scalarizable types, so do not
     assume constant array bounds.  We needn't do anything with such
     cases, since they'll be referring to objects that we should have
     already rejected for scalarization, so returning false is fine.  */

  dom = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (expr, 0)));
  if (dom == NULL)
    return false;

  t = TYPE_MIN_VALUE (dom);
  if (!t || TREE_CODE (t) != INTEGER_CST)
    return false;
  if (tree_int_cst_lt (index, t))
    return false;

  t = TYPE_MAX_VALUE (dom);
  if (!t || TREE_CODE (t) != INTEGER_CST)
    return false;
  if (tree_int_cst_lt (t, index))
    return false;

  return true;
}

/* Create or return the SRA_ELT structure for EXPR if the expression
   refers to a scalarizable variable.  */

static struct sra_elt *
maybe_lookup_element_for_expr (tree expr)
{
  struct sra_elt *elt;
  tree child;

  switch (TREE_CODE (expr))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      if (is_sra_candidate_decl (expr))
	return lookup_element (NULL, expr, TREE_TYPE (expr), INSERT);
      return NULL;

    case ARRAY_REF:
      /* We can't scalarize variable array indices.  */
      if (is_valid_const_index (expr))
	child = TREE_OPERAND (expr, 1);
      else
	return NULL;
      break;

    case COMPONENT_REF:
      /* Don't look through unions.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) != RECORD_TYPE)
	return NULL;
      child = TREE_OPERAND (expr, 1);
      break;

    case REALPART_EXPR:
      child = integer_zero_node;
      break;
    case IMAGPART_EXPR:
      child = integer_one_node;
      break;

    default:
      return NULL;
    }

  elt = maybe_lookup_element_for_expr (TREE_OPERAND (expr, 0));
  if (elt)
    return lookup_element (elt, child, TREE_TYPE (expr), INSERT);
  return NULL;
}

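/* To make the mapping concrete (hypothetical declaration): given

	struct { float f[2]; } x;

   the reference "x.f[1]" yields a root element whose ELEMENT is the
   VAR_DECL for X, a child keyed by the FIELD_DECL for F, and below that
   a child keyed by the INTEGER_CST 1.  A second occurrence of the same
   expression hashes to the same sra_elt through sra_map.  */
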
/* Functions to walk just enough of the tree to see all scalarizable
   references, and categorize them.  */

/* A set of callbacks for phases 2 and 4.  They'll be invoked for the
   various kinds of references seen.  In all cases, *BSI is an iterator
   pointing to the statement being processed.  */
struct sra_walk_fns
{
  /* Invoked when ELT is required as a unit.  Note that ELT might refer to
     a leaf node, in which case this is a simple scalar reference.  *EXPR_P
     points to the location of the expression.  IS_OUTPUT is true if this
     is a left-hand-side reference.  */
  void (*use) (struct sra_elt *elt, tree *expr_p,
	       block_stmt_iterator *bsi, bool is_output);

  /* Invoked when we have a copy between two scalarizable references.  */
  void (*copy) (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
		block_stmt_iterator *bsi);

  /* Invoked when ELT is initialized from a constant.  VALUE may be NULL,
     in which case it should be treated as an empty CONSTRUCTOR.  */
  void (*init) (struct sra_elt *elt, tree value, block_stmt_iterator *bsi);

  /* Invoked when we have a copy between one scalarizable reference ELT
     and one non-scalarizable reference OTHER.  IS_OUTPUT is true if ELT
     is on the left-hand side.  */
  void (*ldst) (struct sra_elt *elt, tree other,
		block_stmt_iterator *bsi, bool is_output);

  /* True during phase 2, false during phase 4.  */
  /* ??? This is a hack.  */
  bool initial_scan;
};

#ifdef ENABLE_CHECKING
/* Invoked via walk_tree, if *TP contains a candidate decl, return it.  */

static tree
sra_find_candidate_decl (tree *tp, int *walk_subtrees,
			 void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
    {
      *walk_subtrees = 0;
      if (is_sra_candidate_decl (t))
	return t;
    }

  return NULL;
}
#endif

/* Walk most expressions looking for a scalarizable aggregate.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_expr (tree *expr_p, block_stmt_iterator *bsi, bool is_output,
	       const struct sra_walk_fns *fns)
{
  tree expr = *expr_p;
  tree inner = expr;
  bool disable_scalarization = false;

  /* We're looking to collect a reference expression between EXPR and INNER,
     such that INNER is a scalarizable decl and all other nodes through EXPR
     are references that we can scalarize.  If we come across something that
     we can't scalarize, we reset EXPR.  This has the effect of making it
     appear that we're referring to the larger expression as a whole.  */

  while (1)
    switch (TREE_CODE (inner))
      {
      case VAR_DECL:
      case PARM_DECL:
      case RESULT_DECL:
	/* If there is a scalarizable decl at the bottom, then process it.  */
	if (is_sra_candidate_decl (inner))
	  {
	    struct sra_elt *elt = maybe_lookup_element_for_expr (expr);
	    if (disable_scalarization)
	      elt->cannot_scalarize = true;
	    else
	      fns->use (elt, expr_p, bsi, is_output);
	  }
	return;

      case ARRAY_REF:
	/* Non-constant index means any member may be accessed.  Prevent the
	   expression from being scalarized.  If we were to treat this as a
	   reference to the whole array, we can wind up with a single dynamic
	   index reference inside a loop being overridden by several constant
	   index references during loop setup.  It's possible that this could
	   be avoided by using dynamic usage counts based on BB trip counts
	   (based on loop analysis or profiling), but that hardly seems worth
	   the effort.  */
	/* ??? Hack.  Figure out how to push this into the scan routines
	   without duplicating too much code.  */
	if (!is_valid_const_index (inner))
	  {
	    disable_scalarization = true;
	    goto use_all;
	  }
	/* ??? Are we assured that non-constant bounds and stride will have
	   the same value everywhere?  I don't think Fortran will...  */
	if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
	  goto use_all;
	inner = TREE_OPERAND (inner, 0);
	break;

      case COMPONENT_REF:
	/* A reference to a union member constitutes a reference to the
	   entire union.  */
	if (TREE_CODE (TREE_TYPE (TREE_OPERAND (inner, 0))) != RECORD_TYPE)
	  goto use_all;
	/* ??? See above re non-constant stride.  */
	if (TREE_OPERAND (inner, 2))
	  goto use_all;
	inner = TREE_OPERAND (inner, 0);
	break;

      case REALPART_EXPR:
      case IMAGPART_EXPR:
	inner = TREE_OPERAND (inner, 0);
	break;

      case BIT_FIELD_REF:
	/* A bit field reference (access to *multiple* fields simultaneously)
	   is not currently scalarized.  Consider this an access to the
	   complete outer element, to which walk_tree will bring us next.  */
	goto use_all;

      case ARRAY_RANGE_REF:
	/* Similarly, a subrange reference is used to modify indexing.  Which
	   means that the canonical element names that we have won't work.  */
	goto use_all;

      case VIEW_CONVERT_EXPR:
      case NOP_EXPR:
	/* Similarly, a view/nop explicitly wants to look at an object in a
	   type other than the one we've scalarized.  */
	goto use_all;

      case WITH_SIZE_EXPR:
	/* This is a transparent wrapper.  The entire inner expression really
	   is being used.  */
	goto use_all;

      use_all:
	expr_p = &TREE_OPERAND (inner, 0);
	inner = expr = *expr_p;
	break;

      default:
#ifdef ENABLE_CHECKING
	/* Validate that we're not missing any references.  */
	gcc_assert (!walk_tree (&inner, sra_find_candidate_decl, NULL, NULL));
#endif
	return;
      }
}

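/* For instance (a hypothetical fragment), walking the reference "x.a[i].b"
   with non-constant I bottoms out at the candidate decl X with
   DISABLE_SCALARIZATION set, so X is marked cannot_scalarize; whereas
   "x.a[2].b" reaches the same decl normally and invokes FNS->USE on the
   element for that constant-indexed path.  */
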
/* Walk a TREE_LIST of values looking for scalarizable aggregates.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_tree_list (tree list, block_stmt_iterator *bsi, bool is_output,
		    const struct sra_walk_fns *fns)
{
  tree op;
  for (op = list; op ; op = TREE_CHAIN (op))
    sra_walk_expr (&TREE_VALUE (op), bsi, is_output, fns);
}

/* Walk the arguments of a CALL_EXPR looking for scalarizable aggregates.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_call_expr (tree expr, block_stmt_iterator *bsi,
		    const struct sra_walk_fns *fns)
{
  sra_walk_tree_list (TREE_OPERAND (expr, 1), bsi, false, fns);
}

/* Walk the inputs and outputs of an ASM_EXPR looking for scalarizable
   aggregates.  If we find one, invoke FNS->USE.  */

static void
sra_walk_asm_expr (tree expr, block_stmt_iterator *bsi,
		   const struct sra_walk_fns *fns)
{
  sra_walk_tree_list (ASM_INPUTS (expr), bsi, false, fns);
  sra_walk_tree_list (ASM_OUTPUTS (expr), bsi, true, fns);
}

/* Walk a MODIFY_EXPR and categorize the assignment appropriately.  */

static void
sra_walk_modify_expr (tree expr, block_stmt_iterator *bsi,
		      const struct sra_walk_fns *fns)
{
  struct sra_elt *lhs_elt, *rhs_elt;
  tree lhs, rhs;

  lhs = TREE_OPERAND (expr, 0);
  rhs = TREE_OPERAND (expr, 1);
  lhs_elt = maybe_lookup_element_for_expr (lhs);
  rhs_elt = maybe_lookup_element_for_expr (rhs);

  /* If both sides are scalarizable, this is a COPY operation.  */
  if (lhs_elt && rhs_elt)
    {
      fns->copy (lhs_elt, rhs_elt, bsi);
      return;
    }

  /* If the RHS is scalarizable, handle it.  There are only two cases.  */
  if (rhs_elt)
    {
      if (!rhs_elt->is_scalar)
	fns->ldst (rhs_elt, lhs, bsi, false);
      else
	fns->use (rhs_elt, &TREE_OPERAND (expr, 1), bsi, false);
    }

  /* If it isn't scalarizable, there may be scalarizable variables within, so
     check for a call or else walk the RHS to see if we need to do any
     copy-in operations.  We need to do it before the LHS is scalarized so
     that the statements get inserted in the proper place, before any
     copy-out operations.  */
  else
    {
      tree call = get_call_expr_in (rhs);
      if (call)
	sra_walk_call_expr (call, bsi, fns);
      else
	sra_walk_expr (&TREE_OPERAND (expr, 1), bsi, false, fns);
    }

  /* Likewise, handle the LHS being scalarizable.  We have cases similar
     to those above, but also want to handle RHS being constant.  */
  if (lhs_elt)
    {
      /* If this is an assignment from a constant, or constructor, then
	 we have access to all of the elements individually.  Invoke INIT.  */
      if (TREE_CODE (rhs) == COMPLEX_EXPR
	  || TREE_CODE (rhs) == COMPLEX_CST
	  || TREE_CODE (rhs) == CONSTRUCTOR)
	fns->init (lhs_elt, rhs, bsi);

      /* If this is an assignment from read-only memory, treat this as if
	 we'd been passed the constructor directly.  Invoke INIT.  */
      else if (TREE_CODE (rhs) == VAR_DECL
	       && TREE_STATIC (rhs)
	       && TREE_READONLY (rhs)
	       && targetm.binds_local_p (rhs))
	fns->init (lhs_elt, DECL_INITIAL (rhs), bsi);

      /* If this is a copy from a non-scalarizable lvalue, invoke LDST.
	 The lvalue requirement prevents us from trying to directly scalarize
	 the result of a function call.  Which would result in trying to call
	 the function multiple times, and other evil things.  */
      else if (!lhs_elt->is_scalar && is_gimple_addressable (rhs))
	fns->ldst (lhs_elt, rhs, bsi, true);

      /* Otherwise we're being used in some context that requires the
	 aggregate to be seen as a whole.  Invoke USE.  */
      else
	fns->use (lhs_elt, &TREE_OPERAND (expr, 0), bsi, true);
    }

  /* Similarly to above, LHS_ELT being null only means that the LHS as a
     whole is not a scalarizable reference.  There may be occurrences of
     scalarizable variables within, which implies a USE.  */
  else
    sra_walk_expr (&TREE_OPERAND (expr, 0), bsi, true, fns);
}

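/* In summary (S and T scalarizable aggregates, P a pointer to a
   non-scalarizable object; hypothetical GIMPLE statements):

	s = t;		COPY:  fns->copy (s_elt, t_elt)
	s = {};		INIT:  fns->init (s_elt, constructor)
	s = *p;		LDST:  fns->ldst (s_elt, *p, is_output = true)
	*p = s;		LDST:  fns->ldst (s_elt, *p, is_output = false)
	s.f = x;	USE of the element for s.f, as an output  */
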
/* Entry point to the walk functions.  Search the entire function,
   invoking the callbacks in FNS on each of the references to
   scalarizable variables.  */

static void
sra_walk_function (const struct sra_walk_fns *fns)
{
  basic_block bb;
  block_stmt_iterator si, ni;

  /* ??? Phase 4 could derive some benefit from walking the function in
     dominator tree order.  */

  FOR_EACH_BB (bb)
    for (si = bsi_start (bb); !bsi_end_p (si); si = ni)
      {
	tree stmt, t;
	stmt_ann_t ann;

	stmt = bsi_stmt (si);
	ann = stmt_ann (stmt);

	ni = si;
	bsi_next (&ni);

	/* If the statement has no virtual operands, then it doesn't
	   make any structure references that we care about.  */
	if (NUM_V_MAY_DEFS (V_MAY_DEF_OPS (ann)) == 0
	    && NUM_VUSES (VUSE_OPS (ann)) == 0
	    && NUM_V_MUST_DEFS (V_MUST_DEF_OPS (ann)) == 0)
	  continue;

	switch (TREE_CODE (stmt))
	  {
	  case RETURN_EXPR:
	    /* If we have "return <retval>" then the return value is
	       already exposed for our pleasure.  Walk it as a USE to
	       force all the components back in place for the return.

	       If we have an embedded assignment, then <retval> is of
	       a type that gets returned in registers in this ABI, and
	       we do not wish to extend their lifetimes.  Treat this
	       as a USE of the variable on the RHS of this assignment.  */

	    t = TREE_OPERAND (stmt, 0);
	    if (TREE_CODE (t) == MODIFY_EXPR)
	      sra_walk_expr (&TREE_OPERAND (t, 1), &si, false, fns);
	    else
	      sra_walk_expr (&TREE_OPERAND (stmt, 0), &si, false, fns);
	    break;

	  case MODIFY_EXPR:
	    sra_walk_modify_expr (stmt, &si, fns);
	    break;
	  case CALL_EXPR:
	    sra_walk_call_expr (stmt, &si, fns);
	    break;
	  case ASM_EXPR:
	    sra_walk_asm_expr (stmt, &si, fns);
	    break;

	  default:
	    break;
	  }
      }
}

/* Phase One: Scan all referenced variables in the program looking for
   structures that could be decomposed.  */

static bool
find_candidates_for_sra (void)
{
  size_t i;
  bool any_set = false;

  for (i = 0; i < num_referenced_vars; i++)
    {
      tree var = referenced_var (i);
      if (decl_can_be_decomposed_p (var))
	{
	  bitmap_set_bit (sra_candidates, var_ann (var)->uid);
	  any_set = true;
	}
    }

  return any_set;
}

/* Phase Two: Scan all references to scalarizable variables.  Count the
   number of times they are used or copied respectively.  */

/* Callbacks to fill in SRA_WALK_FNS.  Everything but USE is
   considered a copy, because we can decompose the reference such that
   the sub-elements needn't be contiguous.  */

static void
scan_use (struct sra_elt *elt, tree *expr_p ATTRIBUTE_UNUSED,
	  block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
	  bool is_output ATTRIBUTE_UNUSED)
{
  elt->n_uses += 1;
}

static void
scan_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
	   block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
{
  lhs_elt->n_copies += 1;
  rhs_elt->n_copies += 1;
}

static void
scan_init (struct sra_elt *lhs_elt, tree rhs ATTRIBUTE_UNUSED,
	   block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
{
  lhs_elt->n_copies += 1;
}

static void
scan_ldst (struct sra_elt *elt, tree other ATTRIBUTE_UNUSED,
	   block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
	   bool is_output ATTRIBUTE_UNUSED)
{
  elt->n_copies += 1;
}

/* Dump the values we collected during the scanning phase.  */

static void
scan_dump (struct sra_elt *elt)
{
  struct sra_elt *c;

  dump_sra_elt_name (dump_file, elt);
  fprintf (dump_file, ": n_uses=%u n_copies=%u\n", elt->n_uses, elt->n_copies);

  for (c = elt->children; c ; c = c->sibling)
    scan_dump (c);
}

/* Entry point to phase 2.  Scan the entire function, building up
   scalarization data structures, recording copies and uses.  */

static void
scan_function (void)
{
  static const struct sra_walk_fns fns = {
    scan_use, scan_copy, scan_init, scan_ldst, true
  };

  sra_walk_function (&fns);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      unsigned i;
      bitmap_iterator bi;

      fputs ("\nScan results:\n", dump_file);
      EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
	{
	  tree var = referenced_var (i);
	  struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
	  if (elt)
	    scan_dump (elt);
	}
      fputc ('\n', dump_file);
    }
}

/* Phase Three: Make decisions about which variables to scalarize, if any.
   All elements to be scalarized have replacement variables made for them.  */

/* A subroutine of build_element_name.  Recursively build the element
   name on the obstack.  */

static void
build_element_name_1 (struct sra_elt *elt)
{
  tree t;
  char buffer[32];

  if (elt->parent)
    {
      build_element_name_1 (elt->parent);
      obstack_1grow (&sra_obstack, '$');

      if (TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
	{
	  if (elt->element == integer_zero_node)
	    obstack_grow (&sra_obstack, "real", 4);
	  else
	    obstack_grow (&sra_obstack, "imag", 4);
	  return;
	}
    }

  t = elt->element;
  if (TREE_CODE (t) == INTEGER_CST)
    {
      /* ??? Eh.  Don't bother doing double-wide printing.  */
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (t));
      obstack_grow (&sra_obstack, buffer, strlen (buffer));
    }
  else
    {
      tree name = DECL_NAME (t);
      if (name)
	obstack_grow (&sra_obstack, IDENTIFIER_POINTER (name),
		      IDENTIFIER_LENGTH (name));
      else
	{
	  sprintf (buffer, "D%u", DECL_UID (t));
	  obstack_grow (&sra_obstack, buffer, strlen (buffer));
	}
    }
}

/* Construct a pretty variable name for an element's replacement variable.
   The name is built on the obstack.  */

static char *
build_element_name (struct sra_elt *elt)
{
  build_element_name_1 (elt);
  obstack_1grow (&sra_obstack, '\0');
  return obstack_finish (&sra_obstack);
}

/* Instantiate an element as an independent variable.  */

static void
instantiate_element (struct sra_elt *elt)
{
  struct sra_elt *base_elt;
  tree var, base;

  for (base_elt = elt; base_elt->parent; base_elt = base_elt->parent)
    continue;
  base = base_elt->element;

  elt->replacement = var = make_rename_temp (elt->type, "SR");
  DECL_SOURCE_LOCATION (var) = DECL_SOURCE_LOCATION (base);
  TREE_NO_WARNING (var) = TREE_NO_WARNING (base);
  DECL_ARTIFICIAL (var) = DECL_ARTIFICIAL (base);
  DECL_IGNORED_P (var) = DECL_IGNORED_P (base);

  if (DECL_NAME (base) && !DECL_IGNORED_P (base))
    {
      char *pretty_name = build_element_name (elt);
      DECL_NAME (var) = get_identifier (pretty_name);
      obstack_free (&sra_obstack, pretty_name);
    }

  if (dump_file)
    {
      fputs ("  ", dump_file);
      dump_sra_elt_name (dump_file, elt);
      fputs (" -> ", dump_file);
      print_generic_expr (dump_file, var, dump_flags);
      fputc ('\n', dump_file);
    }
}

/* Make one pass across an element tree deciding whether or not it's
   profitable to instantiate individual leaf scalars.

   PARENT_USES and PARENT_COPIES are the sum of the N_USES and N_COPIES
   fields all the way up the tree.  */

static void
decide_instantiation_1 (struct sra_elt *elt, unsigned int parent_uses,
			unsigned int parent_copies)
{
  if (dump_file && !elt->parent)
    {
      fputs ("Initial instantiation for ", dump_file);
      dump_sra_elt_name (dump_file, elt);
      fputc ('\n', dump_file);
    }

  if (elt->cannot_scalarize)
    return;

  if (elt->is_scalar)
    {
      /* The decision is simple: instantiate if we're used more frequently
	 than the parent needs to be seen as a complete unit.  */
      if (elt->n_uses + elt->n_copies + parent_copies > parent_uses)
	instantiate_element (elt);
    }
  else
    {
      struct sra_elt *c;
      unsigned int this_uses = elt->n_uses + parent_uses;
      unsigned int this_copies = elt->n_copies + parent_copies;

      for (c = elt->children; c ; c = c->sibling)
	decide_instantiation_1 (c, this_uses, this_copies);
    }
}

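/* A worked instance of the heuristic (made-up counts): if an aggregate A
   is referenced as a unit 2 times (PARENT_USES == 2) and copied 3 times
   (PARENT_COPIES == 3), then a leaf A.F with n_uses == 1 and n_copies == 0
   satisfies 1 + 0 + 3 > 2 and is instantiated; had A been referenced as a
   unit 10 times instead, 1 + 0 + 3 > 10 would fail and A.F would stay
   uninstantiated.  */
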
/* Compute the size and number of all instantiated elements below ELT.
   We will only care about this if the size of the complete structure
   fits in a HOST_WIDE_INT, so we don't have to worry about overflow.  */

static unsigned int
sum_instantiated_sizes (struct sra_elt *elt, unsigned HOST_WIDE_INT *sizep)
{
  if (elt->replacement)
    {
      *sizep += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (elt->type));
      return 1;
    }
  else
    {
      struct sra_elt *c;
      unsigned int count = 0;

      for (c = elt->children; c ; c = c->sibling)
	count += sum_instantiated_sizes (c, sizep);

      return count;
    }
}

/* Instantiate fields in ELT->TYPE that are not currently present as
   children of ELT.  */

static void instantiate_missing_elements (struct sra_elt *elt);

static void
instantiate_missing_elements_1 (struct sra_elt *elt, tree child, tree type)
{
  struct sra_elt *sub = lookup_element (elt, child, type, INSERT);
  if (sub->is_scalar)
    {
      if (sub->replacement == NULL)
	instantiate_element (sub);
    }
  else
    instantiate_missing_elements (sub);
}

static void
instantiate_missing_elements (struct sra_elt *elt)
{
  tree type = elt->type;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	tree f;
	for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    instantiate_missing_elements_1 (elt, f, TREE_TYPE (f));
	break;
      }

    case ARRAY_TYPE:
      {
	tree i, max, subtype;

	i = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	max = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	subtype = TREE_TYPE (type);

	while (1)
	  {
	    instantiate_missing_elements_1 (elt, i, subtype);
	    if (tree_int_cst_equal (i, max))
	      break;
	    i = int_const_binop (PLUS_EXPR, i, integer_one_node, true);
	  }

	break;
      }

    case COMPLEX_TYPE:
      type = TREE_TYPE (type);
      instantiate_missing_elements_1 (elt, integer_zero_node, type);
      instantiate_missing_elements_1 (elt, integer_one_node, type);
      break;

    default:
      gcc_unreachable ();
    }
}

/* Make one pass across an element tree deciding whether to perform block
   or element copies.  If we decide on element copies, instantiate all
   elements.  Return true if there are any instantiated sub-elements.  */

static bool
decide_block_copy (struct sra_elt *elt)
{
  struct sra_elt *c;
  bool any_inst;

  /* If scalarization is disabled, respect it.  */
  if (elt->cannot_scalarize)
    {
      elt->use_block_copy = 1;

      if (dump_file)
	{
	  fputs ("Scalarization disabled for ", dump_file);
	  dump_sra_elt_name (dump_file, elt);
	  fputc ('\n', dump_file);
	}

      return false;
    }

  /* Don't decide if we've no uses.  */
  if (elt->n_uses == 0 && elt->n_copies == 0)
    ;

  else if (!elt->is_scalar)
    {
      tree size_tree = TYPE_SIZE_UNIT (elt->type);
      bool use_block_copy = true;

      /* Don't bother trying to figure out the rest if the structure is
	 so large we can't do easy arithmetic.  This also forces block
	 copies for variable sized structures.  */
      if (host_integerp (size_tree, 1))
	{
	  unsigned HOST_WIDE_INT full_size, inst_size = 0;
	  unsigned int inst_count;
	  unsigned int max_size;

	  /* If the sra-max-structure-size parameter is 0, then the
	     user has not overridden the parameter and we can choose a
	     sensible default.  */
	  max_size = SRA_MAX_STRUCTURE_SIZE
	    ? SRA_MAX_STRUCTURE_SIZE
	    : MOVE_RATIO * UNITS_PER_WORD;

	  full_size = tree_low_cst (size_tree, 1);

	  /* ??? What to do here.  If there are two fields, and we've only
	     instantiated one, then instantiating the other is clearly a win.
	     If there are a large number of fields then the size of the copy
	     is much more of a factor.  */

	  /* If the structure is small, and we've made copies, go ahead
	     and instantiate, hoping that the copies will go away.  */
	  if (full_size <= max_size
	      && elt->n_copies > elt->n_uses)
	    use_block_copy = false;
	  else
	    {
	      inst_count = sum_instantiated_sizes (elt, &inst_size);

	      if (inst_size * 100 >= full_size * SRA_FIELD_STRUCTURE_RATIO)
		use_block_copy = false;
	    }

	  /* In order to avoid block copy, we have to be able to instantiate
	     all elements of the type.  See if this is possible.  */
	  if (!use_block_copy
	      && (!can_completely_scalarize_p (elt)
		  || !type_can_instantiate_all_elements (elt->type)))
	    use_block_copy = true;
	}
      elt->use_block_copy = use_block_copy;

      if (dump_file)
	{
	  fprintf (dump_file, "Using %s for ",
		   use_block_copy ? "block-copy" : "element-copy");
	  dump_sra_elt_name (dump_file, elt);
	  fputc ('\n', dump_file);
	}

      if (!use_block_copy)
	{
	  instantiate_missing_elements (elt);
	  return true;
	}
    }

  any_inst = elt->replacement != NULL;

  for (c = elt->children; c ; c = c->sibling)
    any_inst |= decide_block_copy (c);

  return any_inst;
}

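/* A numeric illustration (made-up numbers; assuming the
   sra-field-structure-ratio parameter is at its usual default of 75):
   for a 16 byte structure with 12 bytes of instantiated elements,
   12 * 100 >= 16 * 75 holds and element copy is chosen.  Separately, an
   8 byte structure that is copied more often than it is used as a unit
   will typically satisfy full_size <= MOVE_RATIO * UNITS_PER_WORD and
   get element copy outright.  */
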
/* Entry point to phase 3.  Instantiate scalar replacement variables.  */

static void
decide_instantiations (void)
{
  unsigned int i;
  bool cleared_any;
  bitmap_head done_head;
  bitmap_iterator bi;

  /* We cannot clear bits from a bitmap we're iterating over,
     so save up all the bits to clear until the end.  */
  bitmap_initialize (&done_head, &bitmap_default_obstack);
  cleared_any = false;

  EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
    {
      tree var = referenced_var (i);
      struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
      if (elt)
	{
	  decide_instantiation_1 (elt, 0, 0);
	  if (!decide_block_copy (elt))
	    elt = NULL;
	}
      if (!elt)
	{
	  bitmap_set_bit (&done_head, i);
	  cleared_any = true;
	}
    }

  if (cleared_any)
    {
      bitmap_and_compl_into (sra_candidates, &done_head);
      bitmap_and_compl_into (needs_copy_in, &done_head);
    }
  bitmap_clear (&done_head);

  if (dump_file)
    fputc ('\n', dump_file);
}

/* Phase Four: Update the function to match the replacements created.  */

/* Mark all the variables in V_MAY_DEF or V_MUST_DEF operands for STMT for
   renaming.  This becomes necessary when we modify all of a non-scalar.  */

static void
mark_all_v_defs (tree stmt)
{
  tree sym;
  ssa_op_iter iter;

  get_stmt_operands (stmt);

  FOR_EACH_SSA_TREE_OPERAND (sym, stmt, iter, SSA_OP_ALL_VIRTUALS)
    {
      if (TREE_CODE (sym) == SSA_NAME)
	sym = SSA_NAME_VAR (sym);
      bitmap_set_bit (vars_to_rename, var_ann (sym)->uid);
    }
}

/* Build a single level component reference to ELT rooted at BASE.  */

static tree
generate_one_element_ref (struct sra_elt *elt, tree base)
{
  switch (TREE_CODE (TREE_TYPE (base)))
    {
    case RECORD_TYPE:
      {
	tree field = elt->element;

	/* Watch out for compatible records with differing field lists.  */
	if (DECL_FIELD_CONTEXT (field) != TYPE_MAIN_VARIANT (TREE_TYPE (base)))
	  field = find_compatible_field (TREE_TYPE (base), field);

	return build (COMPONENT_REF, elt->type, base, field, NULL);
      }

    case ARRAY_TYPE:
      return build (ARRAY_REF, elt->type, base, elt->element, NULL, NULL);

    case COMPLEX_TYPE:
      if (elt->element == integer_zero_node)
	return build (REALPART_EXPR, elt->type, base);
      else
	return build (IMAGPART_EXPR, elt->type, base);

    default:
      gcc_unreachable ();
    }
}

/* Build a full component reference to ELT rooted at its native variable.  */

static tree
generate_element_ref (struct sra_elt *elt)
{
  if (elt->parent)
    return generate_one_element_ref (elt, generate_element_ref (elt->parent));
  else
    return elt->element;
}

/* Generate a set of assignment statements in *LIST_P to copy all
   instantiated elements under ELT to or from the equivalent structure
   rooted at EXPR.  COPY_OUT controls the direction of the copy, with
   true meaning to copy out of EXPR into ELT.  */

static void
generate_copy_inout (struct sra_elt *elt, bool copy_out, tree expr,
		     tree *list_p)
{
  struct sra_elt *c;
  tree t;

  if (elt->replacement)
    {
      if (copy_out)
	t = build (MODIFY_EXPR, void_type_node, elt->replacement, expr);
      else
	t = build (MODIFY_EXPR, void_type_node, expr, elt->replacement);
      append_to_statement_list (t, list_p);
    }
  else
    {
      for (c = elt->children; c ; c = c->sibling)
	{
	  t = generate_one_element_ref (c, unshare_expr (expr));
	  generate_copy_inout (c, copy_out, t, list_p);
	}
    }
}

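/* For example (hypothetical element tree): if P has instantiated leaf
   replacements p$x and p$y, then calling this with COPY_OUT true and
   EXPR being the reference to P appends

	p$x = p.x;
	p$y = p.y;

   to *LIST_P, and with COPY_OUT false the assignments run the other
   way.  */
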
/* Generate a set of assignment statements in *LIST_P to copy all instantiated
   elements under SRC to their counterparts under DST.  There must be a 1-1
   correspondence of instantiated elements.  */

static void
generate_element_copy (struct sra_elt *dst, struct sra_elt *src, tree *list_p)
{
  struct sra_elt *dc, *sc;

  for (dc = dst->children; dc ; dc = dc->sibling)
    {
      sc = lookup_element (src, dc->element, NULL, NO_INSERT);
      gcc_assert (sc);
      generate_element_copy (dc, sc, list_p);
    }

  if (dst->replacement)
    {
      tree t;

      gcc_assert (src->replacement);

      t = build (MODIFY_EXPR, void_type_node, dst->replacement,
		 src->replacement);
      append_to_statement_list (t, list_p);
    }
}

/* Generate a set of assignment statements in *LIST_P to zero all instantiated
   elements under ELT.  In addition, do not assign to elements that have been
   marked VISITED but do reset the visited flag; this allows easy coordination
   with generate_element_init.  */

static void
generate_element_zero (struct sra_elt *elt, tree *list_p)
{
  struct sra_elt *c;

  if (elt->visited)
    {
      elt->visited = false;
      return;
    }

  for (c = elt->children; c ; c = c->sibling)
    generate_element_zero (c, list_p);

  if (elt->replacement)
    {
      tree t;

      gcc_assert (elt->is_scalar);
      t = fold_convert (elt->type, integer_zero_node);

      t = build (MODIFY_EXPR, void_type_node, elt->replacement, t);
      append_to_statement_list (t, list_p);
    }
}

/* Generate an assignment VAR = INIT, where INIT may need gimplification.
   Add the result to *LIST_P.  */

static void
generate_one_element_init (tree var, tree init, tree *list_p)
{
  tree stmt;

  /* The replacement can be almost arbitrarily complex.  Gimplify.  */
  stmt = build (MODIFY_EXPR, void_type_node, var, init);
  gimplify_stmt (&stmt);

  /* The replacement can expose previously unreferenced variables.  */
  if (TREE_CODE (stmt) == STATEMENT_LIST)
    {
      tree_stmt_iterator i;
      for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
	find_new_referenced_vars (tsi_stmt_ptr (i));
    }
  else
    find_new_referenced_vars (&stmt);

  append_to_statement_list (stmt, list_p);
}

/* Generate a set of assignment statements in *LIST_P to set all instantiated
   elements under ELT with the contents of the initializer INIT.  In addition,
   mark all assigned elements VISITED; this allows easy coordination with
   generate_element_zero.  Return false if we found a case we couldn't
   handle.  */

static bool
generate_element_init (struct sra_elt *elt, tree init, tree *list_p)
{
  bool result = true;
  enum tree_code init_code;
  struct sra_elt *sub;
  tree t;

  /* We can be passed DECL_INITIAL of a static variable.  It might have a
     conversion, which we strip off here.  */
  STRIP_USELESS_TYPE_CONVERSION (init);
  init_code = TREE_CODE (init);

  if (elt->is_scalar)
    {
      if (elt->replacement)
	{
	  generate_one_element_init (elt->replacement, init, list_p);
	  elt->visited = true;
	}
      return result;
    }

  switch (init_code)
    {
    case COMPLEX_CST:
    case COMPLEX_EXPR:
      for (sub = elt->children; sub ; sub = sub->sibling)
	{
	  if (sub->element == integer_zero_node)
	    t = (init_code == COMPLEX_EXPR
		 ? TREE_OPERAND (init, 0) : TREE_REALPART (init));
	  else
	    t = (init_code == COMPLEX_EXPR
		 ? TREE_OPERAND (init, 1) : TREE_IMAGPART (init));
	  result &= generate_element_init (sub, t, list_p);
	}
      break;

    case CONSTRUCTOR:
      for (t = CONSTRUCTOR_ELTS (init); t ; t = TREE_CHAIN (t))
	{
	  sub = lookup_element (elt, TREE_PURPOSE (t), NULL, NO_INSERT);
	  if (sub == NULL)
	    continue;
	  result &= generate_element_init (sub, TREE_VALUE (t), list_p);
	}
      break;

    default:
      elt->visited = true;
      result = false;
    }

  return result;
}

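/* For example (hypothetical): initializing "struct { int x, y; } s" from
   the constructor { .x = 1 } generates "s$x = 1" here and marks the X
   element VISITED; the follow-up call to generate_element_zero in
   scalarize_init then emits "s$y = 0" for the element the constructor
   did not cover.  */
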
/* Insert STMT on all the outgoing edges out of BB.  Note that if BB
   has more than one edge, STMT will be replicated for each edge.  Also,
   abnormal edges will be ignored.  */

void
insert_edge_copies (tree stmt, basic_block bb)
{
  edge e;
  edge_iterator ei;
  bool first_copy;

  first_copy = true;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      /* We don't need to insert copies on abnormal edges.  The
	 value of the scalar replacement is not guaranteed to
	 be valid through an abnormal edge.  */
      if (!(e->flags & EDGE_ABNORMAL))
	{
	  if (first_copy)
	    {
	      bsi_insert_on_edge (e, stmt);
	      first_copy = false;
	    }
	  else
	    bsi_insert_on_edge (e, unsave_expr_now (stmt));
	}
    }
}

/* Helper function to insert LIST before BSI, and set up line number info.  */

static void
sra_insert_before (block_stmt_iterator *bsi, tree list)
{
  tree stmt = bsi_stmt (*bsi);

  if (EXPR_HAS_LOCATION (stmt))
    annotate_all_with_locus (&list, EXPR_LOCATION (stmt));
  bsi_insert_before (bsi, list, BSI_SAME_STMT);
}

/* Similarly, but insert after BSI.  Handles insertion onto edges as well.  */

static void
sra_insert_after (block_stmt_iterator *bsi, tree list)
{
  tree stmt = bsi_stmt (*bsi);

  if (EXPR_HAS_LOCATION (stmt))
    annotate_all_with_locus (&list, EXPR_LOCATION (stmt));

  if (stmt_ends_bb_p (stmt))
    insert_edge_copies (list, bsi->bb);
  else
    bsi_insert_after (bsi, list, BSI_SAME_STMT);
}

/* Similarly, but replace the statement at BSI.  */

static void
sra_replace (block_stmt_iterator *bsi, tree list)
{
  sra_insert_before (bsi, list);
  bsi_remove (bsi);
  if (bsi_end_p (*bsi))
    *bsi = bsi_last (bsi->bb);
  else
    bsi_prev (bsi);
}

/* Scalarize a USE.  To recap, this is either a simple reference to ELT,
   if elt is scalar, or some occurrence of ELT that requires a complete
   aggregate.  IS_OUTPUT is true if ELT is being modified.  */

static void
scalarize_use (struct sra_elt *elt, tree *expr_p, block_stmt_iterator *bsi,
	       bool is_output)
{
  tree list = NULL, stmt = bsi_stmt (*bsi);

  if (elt->replacement)
    {
      /* If we have a replacement, then updating the reference is as
	 simple as modifying the existing statement in place.  */
      if (is_output)
	mark_all_v_defs (stmt);
      *expr_p = elt->replacement;
      modify_stmt (stmt);
    }
  else
    {
      /* Otherwise we need some copies.  If ELT is being read, then we want
	 to store all (modified) sub-elements back into the structure before
	 the reference takes place.  If ELT is being written, then we want to
	 load the changed values back into our shadow variables.  */
      /* ??? We don't check modified for reads, we just always write all of
	 the values.  We should be able to record the SSA number of the VOP
	 for which the values were last read.  If that number matches the
	 SSA number of the VOP in the current statement, then we needn't
	 emit an assignment.  This would also eliminate double writes when
	 a structure is passed as more than one argument to a function call.
	 This optimization would be most effective if sra_walk_function
	 processed the blocks in dominator order.  */

      generate_copy_inout (elt, is_output, generate_element_ref (elt), &list);
      if (list == NULL)
	return;
      mark_all_v_defs (expr_first (list));
      if (is_output)
	sra_insert_after (bsi, list);
      else
	sra_insert_before (bsi, list);
    }
}

/* Scalarize a COPY.  To recap, this is an assignment statement between
   two scalarizable references, LHS_ELT and RHS_ELT.  */

static void
scalarize_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
		block_stmt_iterator *bsi)
{
  tree list, stmt;

  if (lhs_elt->replacement && rhs_elt->replacement)
    {
      /* If we have two scalar operands, modify the existing statement.  */
      stmt = bsi_stmt (*bsi);

      /* See the commentary in sra_walk_function concerning
	 RETURN_EXPR, and why we should never see one here.  */
      gcc_assert (TREE_CODE (stmt) == MODIFY_EXPR);

      TREE_OPERAND (stmt, 0) = lhs_elt->replacement;
      TREE_OPERAND (stmt, 1) = rhs_elt->replacement;
      modify_stmt (stmt);
    }
  else if (lhs_elt->use_block_copy || rhs_elt->use_block_copy)
    {
      /* If either side requires a block copy, then sync the RHS back
	 to the original structure, leave the original assignment
	 statement (which will perform the block copy), then load the
	 LHS values out of its now-updated original structure.  */
      /* ??? Could perform a modified pair-wise element copy.  That
	 would at least allow those elements that are instantiated in
	 both structures to be optimized well.  */

      list = NULL;
      generate_copy_inout (rhs_elt, false,
			   generate_element_ref (rhs_elt), &list);
      if (list)
	{
	  mark_all_v_defs (expr_first (list));
	  sra_insert_before (bsi, list);
	}

      list = NULL;
      generate_copy_inout (lhs_elt, true,
			   generate_element_ref (lhs_elt), &list);
      if (list)
	sra_insert_after (bsi, list);
    }
  else
    {
      /* Otherwise both sides must be fully instantiated.  In which
	 case perform pair-wise element assignments and replace the
	 original block copy statement.  */

      stmt = bsi_stmt (*bsi);
      mark_all_v_defs (stmt);

      list = NULL;
      generate_element_copy (lhs_elt, rhs_elt, &list);
      gcc_assert (list);
      sra_replace (bsi, list);
    }
}

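/* Thus (hypothetical statements) a block copy "a = b" where both sides
   are fully instantiated is replaced by the element assignments

	a$x = b$x;
	a$y = b$y;

   while if either side is marked use_block_copy, the original aggregate
   assignment stays and only copy-in/copy-out statements are added
   around it.  */
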
/* Scalarize an INIT.  To recap, this is an assignment to a scalarizable
   reference from some form of constructor: CONSTRUCTOR, COMPLEX_CST or
   COMPLEX_EXPR.  If RHS is NULL, it should be treated as an empty
   CONSTRUCTOR.  */

static void
scalarize_init (struct sra_elt *lhs_elt, tree rhs, block_stmt_iterator *bsi)
{
  bool result = true;
  tree list = NULL;

  /* Generate initialization statements for all members extant in the RHS.  */
  if (rhs)
    {
      /* Unshare the expression just in case this is from a decl's initial.  */
      rhs = unshare_expr (rhs);
      push_gimplify_context ();
      result = generate_element_init (lhs_elt, rhs, &list);
      pop_gimplify_context (NULL);
    }

  /* CONSTRUCTOR is defined such that any member not mentioned is assigned
     a zero value.  Initialize the rest of the instantiated elements.  */
  generate_element_zero (lhs_elt, &list);

  if (!result)
    {
      /* If we failed to convert the entire initializer, then we must
	 leave the structure assignment in place and must load values
	 from the structure into the slots for which we did not find
	 constants.  The easiest way to do this is to generate a complete
	 copy-out, and then follow that with the constant assignments
	 that we were able to build.  DCE will clean things up.  */
      tree list0 = NULL;
      generate_copy_inout (lhs_elt, true, generate_element_ref (lhs_elt),
			   &list0);
      append_to_statement_list (list, &list0);
      list = list0;
    }

  if (lhs_elt->use_block_copy || !result)
    {
      /* Since LHS is not fully instantiated, we must leave the structure
	 assignment in place.  Treating this case differently from a USE
	 exposes constants to later optimizations.  */
      if (list)
	{
	  mark_all_v_defs (expr_first (list));
	  sra_insert_after (bsi, list);
	}
    }
  else
    {
      /* The LHS is fully instantiated.  The list of initializations
	 replaces the original structure assignment.  */
      mark_all_v_defs (bsi_stmt (*bsi));
      sra_replace (bsi, list);
    }
}

/* A subroutine of scalarize_ldst called via walk_tree.  Set TREE_THIS_NOTRAP
   on all INDIRECT_REFs.  */

static tree
mark_notrap (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      TREE_THIS_NOTRAP (t) = 1;
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;

  return NULL;
}

/* Scalarize a LDST.  To recap, this is an assignment between one scalarizable
   reference ELT and one non-scalarizable reference OTHER.  IS_OUTPUT is true
   if ELT is on the left-hand side.  */

static void
scalarize_ldst (struct sra_elt *elt, tree other,
		block_stmt_iterator *bsi, bool is_output)
{
  /* Shouldn't have gotten called for a scalar.  */
  gcc_assert (!elt->replacement);

  if (elt->use_block_copy)
    {
      /* Since ELT is not fully instantiated, we have to leave the
	 block copy in place.  Treat this as a USE.  */
      scalarize_use (elt, NULL, bsi, is_output);
    }
  else
    {
      /* The interesting case is when ELT is fully instantiated.  In this
	 case we can have each element stored/loaded directly to/from the
	 corresponding slot in OTHER.  This avoids a block copy.  */

      tree list = NULL, stmt = bsi_stmt (*bsi);

      mark_all_v_defs (stmt);
      generate_copy_inout (elt, is_output, other, &list);
      gcc_assert (list);

      /* Preserve EH semantics.  */
      if (stmt_ends_bb_p (stmt))
	{
	  tree_stmt_iterator tsi;
	  tree first;

	  /* Extract the first statement from LIST.  */
	  tsi = tsi_start (list);
	  first = tsi_stmt (tsi);
	  tsi_delink (&tsi);

	  /* Replace the old statement with this new representative.  */
	  bsi_replace (bsi, first, true);

	  if (!tsi_end_p (tsi))
	    {
	      /* If any reference would trap, then they all would.  And more
		 to the point, the first would.  Therefore none of the rest
		 will trap since the first didn't.  Indicate this by
		 iterating over the remaining statements and set
		 TREE_THIS_NOTRAP in all INDIRECT_REFs.  */
	      do
		{
		  walk_tree (tsi_stmt_ptr (tsi), mark_notrap, NULL, NULL);
		  tsi_next (&tsi);
		}
	      while (!tsi_end_p (tsi));

	      insert_edge_copies (list, bsi->bb);
	    }
	}
      else
	sra_replace (bsi, list);
    }
}

/* Generate initializations for all scalarizable parameters.  */

static void
scalarize_parms (void)
{
  tree list = NULL;
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (needs_copy_in, 0, i, bi)
    {
      tree var = referenced_var (i);
      struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
      generate_copy_inout (elt, true, var, &list);
    }

  if (list)
    insert_edge_copies (list, ENTRY_BLOCK_PTR);
}

/* Entry point to phase 4.  Update the function to match replacements.  */

static void
scalarize_function (void)
{
  static const struct sra_walk_fns fns = {
    scalarize_use, scalarize_copy, scalarize_init, scalarize_ldst, false
  };

  sra_walk_function (&fns);
  scalarize_parms ();
  bsi_commit_edge_inserts ();
}

/* Debug helper function.  Print ELT in a nice human-readable format.  */

static void
dump_sra_elt_name (FILE *f, struct sra_elt *elt)
{
  if (elt->parent && TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
    {
      fputs (elt->element == integer_zero_node ? "__real__ " : "__imag__ ", f);
      dump_sra_elt_name (f, elt->parent);
    }
  else
    {
      if (elt->parent)
	dump_sra_elt_name (f, elt->parent);
      if (DECL_P (elt->element))
	{
	  if (TREE_CODE (elt->element) == FIELD_DECL)
	    fputc ('.', f);
	  print_generic_expr (f, elt->element, dump_flags);
	}
      else
	fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC "]",
		 TREE_INT_CST_LOW (elt->element));
    }
}

/* Likewise, but callable from the debugger.  */

void
debug_sra_elt_name (struct sra_elt *elt)
{
  dump_sra_elt_name (stderr, elt);
  fputc ('\n', stderr);
}

/* Main entry point.  */

static void
tree_sra (void)
{
  /* Initialize local variables.  */
  gcc_obstack_init (&sra_obstack);
  sra_candidates = BITMAP_XMALLOC ();
  needs_copy_in = BITMAP_XMALLOC ();
  sra_type_decomp_cache = BITMAP_XMALLOC ();
  sra_type_inst_cache = BITMAP_XMALLOC ();
  sra_map = htab_create (101, sra_elt_hash, sra_elt_eq, NULL);

  /* Scan.  If we find anything, instantiate and scalarize.  */
  if (find_candidates_for_sra ())
    {
      scan_function ();
      decide_instantiations ();
      scalarize_function ();
    }

  /* Free allocated memory.  */
  htab_delete (sra_map);
  sra_map = NULL;
  BITMAP_XFREE (sra_candidates);
  BITMAP_XFREE (needs_copy_in);
  BITMAP_XFREE (sra_type_decomp_cache);
  BITMAP_XFREE (sra_type_inst_cache);
  obstack_free (&sra_obstack, NULL);
}

static bool
gate_sra (void)
{
  return flag_tree_sra != 0;
}

struct tree_opt_pass pass_sra =
{
  "sra",				/* name */
  gate_sra,				/* gate */
  tree_sra,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_SRA,				/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_rename_vars
    | TODO_ggc_collect | TODO_verify_ssa,	/* todo_flags_finish */
  0					/* letter */
};