1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2 references into scalar references, exposing them to the scalar optimizers.
4 Copyright (C) 2003, 2004 Free Software Foundation, Inc.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published by the
11 Free Software Foundation; either version 2, or (at your option) any later version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING. If not, write to the Free
21 Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
26 #include "coretypes.h"
32 /* These RTL headers are needed for basic-block.h. */
35 #include "hard-reg-set.h"
36 #include "basic-block.h"
37 #include "diagnostic.h"
38 #include "langhooks.h"
39 #include "tree-inline.h"
40 #include "tree-flow.h"
41 #include "tree-gimple.h"
42 #include "tree-dump.h"
43 #include "tree-pass.h"
49 /* expr.h is needed for MOVE_RATIO. */
53 /* The objective of this pass is to replace a non-addressable aggregate with a
54 set of independent variables. Most of the time, all of these variables
55 will be scalars. But a secondary objective is to break up larger
56 aggregates into smaller aggregates. In the process we may find that some
57 bits of the larger aggregate can be deleted as unreferenced.
59 This substitution is done globally. More localized substitutions would
60 be the purview of a load-store motion pass.
62 The optimization proceeds in phases:
64 (1) Identify variables that have types that are candidates for
67 (2) Scan the function looking for the ways these variables are used.
68 In particular we're interested in the number of times a variable
69 (or member) is needed as a complete unit, and the number of times
70 a variable (or member) is copied.
72 (3) Based on the usage profile, instantiate substitution variables.
74 (4) Scan the function making replacements.
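/* For illustration, a rough sketch of the net effect at the source level
   (identifiers invented for exposition; the pass itself operates on
   GIMPLE statements rather than C source):

     struct point { int x; int y; };

     Before SRA:                       After SRA:

       struct point p;                   int p$x, p$y;
       p.x = 3;                          p$x = 3;
       p.y = 4;                          p$y = 4;
       return p.x + p.y;                 return p$x + p$y;

   With the aggregate gone, the later scalar optimizers (constant
   propagation, dead code elimination, ...) can finish the job, here
   reducing the function body to "return 7".  */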
78 /* The set of aggregate variables that are candidates for scalarization. */
79 static bitmap sra_candidates;
81 /* Set of scalarizable PARM_DECLs that need copy-in operations at the
82 beginning of the function. */
83 static bitmap needs_copy_in;
85 /* Sets of bit pairs that cache type decomposition and instantiation. */
86 static bitmap sra_type_decomp_cache;
87 static bitmap sra_type_inst_cache;
89 /* One of these structures is created for each candidate aggregate
90 and each (accessed) member of such an aggregate. */
93 /* A tree of the elements. Used when we want to traverse everything. */
94 struct sra_elt *parent;
95 struct sra_elt *children;
96 struct sra_elt *sibling;
98 /* If this element is a root, then this is the VAR_DECL. If this is
99 a sub-element, this is some token used to identify the reference.
100 In the case of COMPONENT_REF, this is the FIELD_DECL. In the case
101 of an ARRAY_REF, this is the (constant) index. In the case of a
102 complex number, this is a zero or one. */
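   /* For example, given a candidate variable A of type
      "struct { struct { int c; } b[4]; }" and the reference "a.b[3].c",
      the element tree would look like (sketch):

         elt (a)        element = VAR_DECL   a    (root, parent == NULL)
           elt (b)      element = FIELD_DECL b
             elt (3)    element = INTEGER_CST 3
               elt (c)  element = FIELD_DECL c

      i.e. each node's ELEMENT names one step of the reference relative
      to its parent.  */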
105 /* The type of the element. */
108 /* A VAR_DECL, for any sub-element we've decided to replace. */
111 /* The number of times the element is referenced as a whole. I.e.
112 given "a.b.c", this would be incremented for C, but not for A or B. */
115 /* The number of times the element is copied to or from another
116 scalarizable element. */
117 unsigned int n_copies;
119 /* True if TYPE is scalar. */
122 /* True if we saw something about this element that prevents scalarization,
123 such as non-constant indexing. */
124 bool cannot_scalarize;
126 /* True if we've decided that structure-to-structure assignment
127 should happen via memcpy and not per-element. */
130 /* A flag for use with/after random access traversals. */
134 /* Random access to the child of a parent is performed by hashing.
135 This prevents quadratic behaviour, and allows SRA to function
136 reasonably on larger records. */
137 static htab_t sra_map;
139 /* All structures are allocated out of the following obstack. */
140 static struct obstack sra_obstack;
142 /* Debugging functions. */
143 static void dump_sra_elt_name (FILE *, struct sra_elt *);
144 extern void debug_sra_elt_name (struct sra_elt *);
147 /* Return true if DECL is an SRA candidate. */
150 is_sra_candidate_decl (tree decl)
152 return DECL_P (decl) && bitmap_bit_p (sra_candidates, var_ann (decl)->uid);
155 /* Return true if TYPE is a scalar type. */
158 is_sra_scalar_type (tree type)
160 enum tree_code code = TREE_CODE (type);
161 return (code == INTEGER_TYPE || code == REAL_TYPE || code == VECTOR_TYPE
162 || code == ENUMERAL_TYPE || code == BOOLEAN_TYPE
163 || code == CHAR_TYPE || code == POINTER_TYPE || code == OFFSET_TYPE
164 || code == REFERENCE_TYPE);
167 /* Return true if TYPE can be decomposed into a set of independent variables.
169 Note that this doesn't imply that all elements of TYPE can be
170 instantiated, just that if we decide to break up the type into
171 separate pieces, it can be done. */
174 type_can_be_decomposed_p (tree type)
176 unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
179 /* Avoid searching the same type twice. */
180 if (bitmap_bit_p (sra_type_decomp_cache, cache+0))
182 if (bitmap_bit_p (sra_type_decomp_cache, cache+1))
185 /* The type must have a definite non-zero size. */
186 if (TYPE_SIZE (type) == NULL || integer_zerop (TYPE_SIZE (type)))
189 /* The type must be a non-union aggregate. */
190 switch (TREE_CODE (type))
194 bool saw_one_field = false;
196 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
197 if (TREE_CODE (t) == FIELD_DECL)
199 /* Reject incorrectly represented bit fields. */
200 if (DECL_BIT_FIELD (t)
201 && (tree_low_cst (DECL_SIZE (t), 1)
202 != TYPE_PRECISION (TREE_TYPE (t))))
205 saw_one_field = true;
208 /* Record types must have at least one field. */
215 /* Array types must have a fixed lower and upper bound. */
216 t = TYPE_DOMAIN (type);
219 if (TYPE_MIN_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MIN_VALUE (t)))
221 if (TYPE_MAX_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MAX_VALUE (t)))
232 bitmap_set_bit (sra_type_decomp_cache, cache+0);
236 bitmap_set_bit (sra_type_decomp_cache, cache+1);
240 /* Return true if DECL can be decomposed into a set of independent
241 (though not necessarily scalar) variables. */
244 decl_can_be_decomposed_p (tree var)
246 /* Early out for scalars. */
247 if (is_sra_scalar_type (TREE_TYPE (var)))
250 /* The variable must not be aliased. */
251 if (!is_gimple_non_addressable (var))
253 if (dump_file && (dump_flags & TDF_DETAILS))
255 fprintf (dump_file, "Cannot scalarize variable ");
256 print_generic_expr (dump_file, var, dump_flags);
257 fprintf (dump_file, " because it must live in memory\n");
262 /* The variable must not be volatile. */
263 if (TREE_THIS_VOLATILE (var))
265 if (dump_file && (dump_flags & TDF_DETAILS))
267 fprintf (dump_file, "Cannot scalarize variable ");
268 print_generic_expr (dump_file, var, dump_flags);
269 fprintf (dump_file, " because it is declared volatile\n");
274 /* We must be able to decompose the variable's type. */
275 if (!type_can_be_decomposed_p (TREE_TYPE (var)))
277 if (dump_file && (dump_flags & TDF_DETAILS))
279 fprintf (dump_file, "Cannot scalarize variable ");
280 print_generic_expr (dump_file, var, dump_flags);
281 fprintf (dump_file, " because its type cannot be decomposed\n");
289 /* Return true if TYPE can be *completely* decomposed into scalars. */
292 type_can_instantiate_all_elements (tree type)
294 if (is_sra_scalar_type (type))
296 if (!type_can_be_decomposed_p (type))
299 switch (TREE_CODE (type))
303 unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
306 if (bitmap_bit_p (sra_type_inst_cache, cache+0))
308 if (bitmap_bit_p (sra_type_inst_cache, cache+1))
311 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
312 if (TREE_CODE (f) == FIELD_DECL)
314 if (!type_can_instantiate_all_elements (TREE_TYPE (f)))
316 bitmap_set_bit (sra_type_inst_cache, cache+1);
321 bitmap_set_bit (sra_type_inst_cache, cache+0);
326 return type_can_instantiate_all_elements (TREE_TYPE (type));
336 /* Test whether ELT or some sub-element cannot be scalarized. */
339 can_completely_scalarize_p (struct sra_elt *elt)
343 if (elt->cannot_scalarize)
346 for (c = elt->children; c ; c = c->sibling)
347 if (!can_completely_scalarize_p (c))
354 /* A simplified tree hashing algorithm that only handles the types of
355 trees we expect to find in sra_elt->element. */
358 sra_hash_tree (tree t)
360 switch (TREE_CODE (t))
368 return TREE_INT_CST_LOW (t) ^ TREE_INT_CST_HIGH (t);
374 /* Hash function for type SRA_ELT. */
377 sra_elt_hash (const void *x)
379 const struct sra_elt *e = x;
380 const struct sra_elt *p;
383 h = sra_hash_tree (e->element);
385 /* Take into account everything back up the chain. Given that chain
386 lengths are rarely very long, this should be acceptable. If we
387 truly identify this as a performance problem, it should work to
388 hash the pointer value "e->parent". */
389 for (p = e->parent; p ; p = p->parent)
390 h = (h * 65521) ^ sra_hash_tree (p->element);
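  /* For example, for the leaf "c" in a chain a -> b -> 3 -> c, the loop
     above computes, in order,

        h = hash (FIELD_DECL c);
        h = (h * 65521) ^ hash (INTEGER_CST 3);
        h = (h * 65521) ^ hash (FIELD_DECL b);
        h = (h * 65521) ^ hash (VAR_DECL a);

     65521 is simply the largest prime below 2**16, used as a cheap
     mixing multiplier.  */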
395 /* Equality function for type SRA_ELT. */
398 sra_elt_eq (const void *x, const void *y)
400 const struct sra_elt *a = x;
401 const struct sra_elt *b = y;
403 if (a->parent != b->parent)
406 /* All the field/decl stuff is unique. */
407 if (a->element == b->element)
410 /* The only thing left is integer equality. */
411 if (TREE_CODE (a->element) == INTEGER_CST
412 && TREE_CODE (b->element) == INTEGER_CST)
413 return tree_int_cst_equal (a->element, b->element);
418 /* Create or return the SRA_ELT structure for CHILD in PARENT. PARENT
419 may be null, in which case CHILD must be a DECL. */
421 static struct sra_elt *
422 lookup_element (struct sra_elt *parent, tree child, tree type,
423 enum insert_option insert)
425 struct sra_elt dummy;
426 struct sra_elt **slot;
429 dummy.parent = parent;
430 dummy.element = child;
432 slot = (struct sra_elt **) htab_find_slot (sra_map, &dummy, insert);
433 if (!slot && insert == NO_INSERT)
437 if (!elt && insert == INSERT)
439 *slot = elt = obstack_alloc (&sra_obstack, sizeof (*elt));
440 memset (elt, 0, sizeof (*elt));
442 elt->parent = parent;
443 elt->element = child;
445 elt->is_scalar = is_sra_scalar_type (type);
449 elt->sibling = parent->children;
450 parent->children = elt;
453 /* If this is a parameter, then if we want to scalarize, we have
454 one copy from the true function parameter. Count it now. */
455 if (TREE_CODE (child) == PARM_DECL)
458 bitmap_set_bit (needs_copy_in, var_ann (child)->uid);
465 /* Return true if the ARRAY_REF in EXPR is a constant, in-bounds access. */
468 is_valid_const_index (tree expr)
470 tree dom, t, index = TREE_OPERAND (expr, 1);
472 if (TREE_CODE (index) != INTEGER_CST)
475 /* Watch out for stupid user tricks, indexing outside the array.
477 Careful, we're not called only on scalarizable types, so do not
478 assume constant array bounds. We needn't do anything with such
479 cases, since they'll be referring to objects that we should have
480 already rejected for scalarization, so returning false is fine. */
482 dom = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (expr, 0)));
486 t = TYPE_MIN_VALUE (dom);
487 if (!t || TREE_CODE (t) != INTEGER_CST)
489 if (tree_int_cst_lt (index, t))
492 t = TYPE_MAX_VALUE (dom);
493 if (!t || TREE_CODE (t) != INTEGER_CST)
495 if (tree_int_cst_lt (t, index))
501 /* Create or return the SRA_ELT structure for EXPR if the expression
502 refers to a scalarizable variable. */
504 static struct sra_elt *
505 maybe_lookup_element_for_expr (tree expr)
510 switch (TREE_CODE (expr))
515 if (is_sra_candidate_decl (expr))
516 return lookup_element (NULL, expr, TREE_TYPE (expr), INSERT);
520 /* We can't scalarize variable array indices. */
521 if (is_valid_const_index (expr))
522 child = TREE_OPERAND (expr, 1);
528 /* Don't look through unions. */
529 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) != RECORD_TYPE)
531 child = TREE_OPERAND (expr, 1);
535 child = integer_zero_node;
538 child = integer_one_node;
545 elt = maybe_lookup_element_for_expr (TREE_OPERAND (expr, 0));
547 return lookup_element (elt, child, TREE_TYPE (expr), INSERT);
552 /* Functions to walk just enough of the tree to see all scalarizable
553 references, and categorize them. */
555 /* A set of callbacks for phases 2 and 4. They'll be invoked for the
556 various kinds of references seen. In all cases, *BSI is an iterator
557 pointing to the statement being processed. */
560 /* Invoked when ELT is required as a unit. Note that ELT might refer to
561 a leaf node, in which case this is a simple scalar reference. *EXPR_P
562 points to the location of the expression. IS_OUTPUT is true if this
563 is a left-hand-side reference. */
564 void (*use) (struct sra_elt *elt, tree *expr_p,
565 block_stmt_iterator *bsi, bool is_output);
567 /* Invoked when we have a copy between two scalarizable references. */
568 void (*copy) (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
569 block_stmt_iterator *bsi);
571 /* Invoked when ELT is initialized from a constant. VALUE may be NULL,
572 in which case it should be treated as an empty CONSTRUCTOR. */
573 void (*init) (struct sra_elt *elt, tree value, block_stmt_iterator *bsi);
575 /* Invoked when we have a copy between one scalarizable reference ELT
576 and one non-scalarizable reference OTHER. IS_OUTPUT is true if ELT
577 is on the left-hand side. */
578 void (*ldst) (struct sra_elt *elt, tree other,
579 block_stmt_iterator *bsi, bool is_output);
581 /* True during phase 2, false during phase 4. */
582 /* ??? This is a hack. */
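  /* Roughly, for a scalarizable aggregate variable A and a
     non-scalarizable aggregate lvalue X (e.g. *ptr), the callbacks are
     dispatched as follows (sketch):

        a.f = 1;        use  (elt for a.f, IS_OUTPUT true)
        t = a.f;        use  (elt for a.f, IS_OUTPUT false)
        a = b;          copy (elt for a, elt for b)
        a = {...};      init (elt for a, the CONSTRUCTOR, possibly NULL)
        a = x;          ldst (elt for a, x, IS_OUTPUT true)
        x = a;          ldst (elt for a, x, IS_OUTPUT false)
        a = foo ();     use  (elt for a, IS_OUTPUT true)  */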
586 #ifdef ENABLE_CHECKING
587 /* Invoked via walk_tree: if *TP contains a candidate decl, return it. */
590 sra_find_candidate_decl (tree *tp, int *walk_subtrees,
591 void *data ATTRIBUTE_UNUSED)
594 enum tree_code code = TREE_CODE (t);
596 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
599 if (is_sra_candidate_decl (t))
609 /* Walk most expressions looking for a scalarizable aggregate.
610 If we find one, invoke FNS->USE. */
613 sra_walk_expr (tree *expr_p, block_stmt_iterator *bsi, bool is_output,
614 const struct sra_walk_fns *fns)
618 bool disable_scalarization = false;
620 /* We're looking to collect a reference expression between EXPR and INNER,
621 such that INNER is a scalarizable decl and all other nodes through EXPR
622 are references that we can scalarize. If we come across something that
623 we can't scalarize, we reset EXPR. This has the effect of making it
624 appear that we're referring to the larger expression as a whole. */
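  /* For example, for a candidate A and the reference "a.b[2].c", INNER
     steps down through a.b[2].c, a.b[2], a.b and finally the decl A, at
     which point FNS->USE is invoked with EXPR_P still pointing at the
     whole of a.b[2].c, i.e. a use of that single element.  With a
     variable index, as in "a.b[i].c", the ARRAY_REF stops the collection:
     EXPR is reset to the enclosing object a.b, and when the walk reaches
     A the element for a.b is marked CANNOT_SCALARIZE rather than being
     reported as a use.  */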
627 switch (TREE_CODE (inner))
632 /* If there is a scalarizable decl at the bottom, then process it. */
633 if (is_sra_candidate_decl (inner))
635 struct sra_elt *elt = maybe_lookup_element_for_expr (expr);
636 if (disable_scalarization)
637 elt->cannot_scalarize = true;
639 fns->use (elt, expr_p, bsi, is_output);
644 /* Non-constant index means any member may be accessed. Prevent the
645 expression from being scalarized. If we were to treat this as a
646 reference to the whole array, we can wind up with a single dynamic
647 index reference inside a loop being overridden by several constant
648 index references during loop setup. It's possible that this could
649 be avoided by using dynamic usage counts based on BB trip counts
650 (based on loop analysis or profiling), but that hardly seems worth the effort. */
652 /* ??? Hack. Figure out how to push this into the scan routines
653 without duplicating too much code. */
654 if (!is_valid_const_index (inner))
656 disable_scalarization = true;
659 /* ??? Are we assured that non-constant bounds and stride will have
660 the same value everywhere? I don't think Fortran will... */
661 if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
663 inner = TREE_OPERAND (inner, 0);
667 /* A reference to a union member constitutes a reference to the entire union. */
669 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (inner, 0))) != RECORD_TYPE)
671 /* ??? See above re non-constant stride. */
672 if (TREE_OPERAND (inner, 2))
674 inner = TREE_OPERAND (inner, 0);
679 inner = TREE_OPERAND (inner, 0);
683 /* A bit field reference (access to *multiple* fields simultaneously)
684 is not currently scalarized. Consider this an access to the
685 complete outer element, to which walk_tree will bring us next. */
688 case ARRAY_RANGE_REF:
689 /* Similarly, a subrange reference is used to modify indexing, which
690 means that the canonical element names that we have won't work. */
693 case VIEW_CONVERT_EXPR:
695 /* Similarly, a view/nop explicitly wants to look at an object in a
696 type other than the one we've scalarized. */
700 expr_p = &TREE_OPERAND (inner, 0);
701 inner = expr = *expr_p;
705 #ifdef ENABLE_CHECKING
706 /* Validate that we're not missing any references. */
707 if (walk_tree (&inner, sra_find_candidate_decl, NULL, NULL))
714 /* Walk a TREE_LIST of values looking for scalarizable aggregates.
715 If we find one, invoke FNS->USE. */
718 sra_walk_tree_list (tree list, block_stmt_iterator *bsi, bool is_output,
719 const struct sra_walk_fns *fns)
722 for (op = list; op ; op = TREE_CHAIN (op))
723 sra_walk_expr (&TREE_VALUE (op), bsi, is_output, fns);
726 /* Walk the arguments of a CALL_EXPR looking for scalarizable aggregates.
727 If we find one, invoke FNS->USE. */
730 sra_walk_call_expr (tree expr, block_stmt_iterator *bsi,
731 const struct sra_walk_fns *fns)
733 sra_walk_tree_list (TREE_OPERAND (expr, 1), bsi, false, fns);
736 /* Walk the inputs and outputs of an ASM_EXPR looking for scalarizable
737 aggregates. If we find one, invoke FNS->USE. */
740 sra_walk_asm_expr (tree expr, block_stmt_iterator *bsi,
741 const struct sra_walk_fns *fns)
743 sra_walk_tree_list (ASM_INPUTS (expr), bsi, false, fns);
744 sra_walk_tree_list (ASM_OUTPUTS (expr), bsi, true, fns);
747 /* Walk a MODIFY_EXPR and categorize the assignment appropriately. */
750 sra_walk_modify_expr (tree expr, block_stmt_iterator *bsi,
751 const struct sra_walk_fns *fns)
753 struct sra_elt *lhs_elt, *rhs_elt;
756 lhs = TREE_OPERAND (expr, 0);
757 rhs = TREE_OPERAND (expr, 1);
758 lhs_elt = maybe_lookup_element_for_expr (lhs);
759 rhs_elt = maybe_lookup_element_for_expr (rhs);
761 /* If both sides are scalarizable, this is a COPY operation. */
762 if (lhs_elt && rhs_elt)
764 fns->copy (lhs_elt, rhs_elt, bsi);
770 /* If this is an assignment from a constant, or constructor, then
771 we have access to all of the elements individually. Invoke INIT. */
772 if (TREE_CODE (rhs) == COMPLEX_EXPR
773 || TREE_CODE (rhs) == COMPLEX_CST
774 || TREE_CODE (rhs) == CONSTRUCTOR)
775 fns->init (lhs_elt, rhs, bsi);
777 /* If this is an assignment from read-only memory, treat this as if
778 we'd been passed the constructor directly. Invoke INIT. */
779 else if (TREE_CODE (rhs) == VAR_DECL
781 && TREE_READONLY (rhs)
782 && targetm.binds_local_p (rhs))
783 fns->init (lhs_elt, DECL_INITIAL (rhs), bsi);
785 /* If this is a copy from a non-scalarizable lvalue, invoke LDST.
786 The lvalue requirement prevents us from trying to directly scalarize
787 the result of a function call, which would result in trying to call
788 the function multiple times, and other evil things. */
789 else if (!lhs_elt->is_scalar && is_gimple_addr_expr_arg (rhs))
790 fns->ldst (lhs_elt, rhs, bsi, true);
792 /* Otherwise we're being used in some context that requires the
793 aggregate to be seen as a whole. Invoke USE. */
795 fns->use (lhs_elt, &TREE_OPERAND (expr, 0), bsi, true);
799 /* LHS_ELT being null only means that the LHS as a whole is not a
800 scalarizable reference. There may be occurrences of scalarizable
801 variables within, which implies a USE. */
802 sra_walk_expr (&TREE_OPERAND (expr, 0), bsi, true, fns);
805 /* Likewise for the right-hand side. The only difference here is that
806 we don't have to handle constants, and the RHS may be a call. */
809 if (!rhs_elt->is_scalar)
810 fns->ldst (rhs_elt, lhs, bsi, false);
812 fns->use (rhs_elt, &TREE_OPERAND (expr, 1), bsi, false);
816 tree call = get_call_expr_in (rhs);
818 sra_walk_call_expr (call, bsi, fns);
820 sra_walk_expr (&TREE_OPERAND (expr, 1), bsi, false, fns);
824 /* Entry point to the walk functions. Search the entire function,
825 invoking the callbacks in FNS on each of the references to
826 scalarizable variables. */
829 sra_walk_function (const struct sra_walk_fns *fns)
832 block_stmt_iterator si, ni;
834 /* ??? Phase 4 could derive some benefit from walking the function in
835 dominator tree order. */
838 for (si = bsi_start (bb); !bsi_end_p (si); si = ni)
843 stmt = bsi_stmt (si);
844 ann = stmt_ann (stmt);
849 /* If the statement has no virtual operands, then it doesn't
850 make any structure references that we care about. */
851 if (NUM_V_MAY_DEFS (V_MAY_DEF_OPS (ann)) == 0
852 && NUM_VUSES (VUSE_OPS (ann)) == 0
853 && NUM_V_MUST_DEFS (V_MUST_DEF_OPS (ann)) == 0)
856 switch (TREE_CODE (stmt))
859 /* If we have "return <retval>" then the return value is
860 already exposed for our pleasure. Walk it as a USE to
861 force all the components back in place for the return.
863 If we have an embedded assignment, then <retval> is of
864 a type that gets returned in registers in this ABI, and
865 we do not wish to extend their lifetimes. Treat this
866 as a USE of the variable on the RHS of this assignment. */
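	  /* Schematically, the two forms handled here are

	       return <retval>;       -- walk <retval> as a USE, forcing the
	                                 scalar replacements back into the
	                                 aggregate before the return;

	       return <retval> = x;   -- walk only X, the RHS of the nested
	                                 MODIFY_EXPR, as a USE.  */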
868 t = TREE_OPERAND (stmt, 0);
869 if (TREE_CODE (t) == MODIFY_EXPR)
870 sra_walk_expr (&TREE_OPERAND (t, 1), &si, false, fns);
872 sra_walk_expr (&TREE_OPERAND (stmt, 0), &si, false, fns);
876 sra_walk_modify_expr (stmt, &si, fns);
879 sra_walk_call_expr (stmt, &si, fns);
882 sra_walk_asm_expr (stmt, &si, fns);
891 /* Phase One: Scan all referenced variables in the program looking for
892 structures that could be decomposed. */
895 find_candidates_for_sra (void)
898 bool any_set = false;
900 for (i = 0; i < num_referenced_vars; i++)
902 tree var = referenced_var (i);
903 if (decl_can_be_decomposed_p (var))
905 bitmap_set_bit (sra_candidates, var_ann (var)->uid);
914 /* Phase Two: Scan all references to scalarizable variables. Count the
915 number of times they are used or copied respectively. */
917 /* Callbacks to fill in SRA_WALK_FNS. Everything but USE is
918 considered a copy, because we can decompose the reference such that
919 the sub-elements needn't be contiguous. */
922 scan_use (struct sra_elt *elt, tree *expr_p ATTRIBUTE_UNUSED,
923 block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
924 bool is_output ATTRIBUTE_UNUSED)
930 scan_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
931 block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
933 lhs_elt->n_copies += 1;
934 rhs_elt->n_copies += 1;
938 scan_init (struct sra_elt *lhs_elt, tree rhs ATTRIBUTE_UNUSED,
939 block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
941 lhs_elt->n_copies += 1;
945 scan_ldst (struct sra_elt *elt, tree other ATTRIBUTE_UNUSED,
946 block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
947 bool is_output ATTRIBUTE_UNUSED)
952 /* Dump the values we collected during the scanning phase. */
955 scan_dump (struct sra_elt *elt)
959 dump_sra_elt_name (dump_file, elt);
960 fprintf (dump_file, ": n_uses=%u n_copies=%u\n", elt->n_uses, elt->n_copies);
962 for (c = elt->children; c ; c = c->sibling)
966 /* Entry point to phase 2. Scan the entire function, building up
967 scalarization data structures, recording copies and uses. */
972 static const struct sra_walk_fns fns = {
973 scan_use, scan_copy, scan_init, scan_ldst, true
976 sra_walk_function (&fns);
978 if (dump_file && (dump_flags & TDF_DETAILS))
982 fputs ("\nScan results:\n", dump_file);
983 EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i,
985 tree var = referenced_var (i);
986 struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
990 fputc ('\n', dump_file);
994 /* Phase Three: Make decisions about which variables to scalarize, if any.
995 All elements to be scalarized have replacement variables made for them. */
997 /* A subroutine of build_element_name. Recursively build the element
998 name on the obstack. */
1001 build_element_name_1 (struct sra_elt *elt)
1008 build_element_name_1 (elt->parent);
1009 obstack_1grow (&sra_obstack, '$');
1011 if (TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
1013 if (elt->element == integer_zero_node)
1014 obstack_grow (&sra_obstack, "real", 4);
1016 obstack_grow (&sra_obstack, "imag", 4);
1022 if (TREE_CODE (t) == INTEGER_CST)
1024 /* ??? Eh. Don't bother doing double-wide printing. */
1025 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (t));
1026 obstack_grow (&sra_obstack, buffer, strlen (buffer));
1030 tree name = DECL_NAME (t);
1032 obstack_grow (&sra_obstack, IDENTIFIER_POINTER (name),
1033 IDENTIFIER_LENGTH (name));
1036 sprintf (buffer, "D%u", DECL_UID (t));
1037 obstack_grow (&sra_obstack, buffer, strlen (buffer));
1042 /* Construct a pretty variable name for an element's replacement variable.
1043 The name is built on the obstack. */
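/* For example, for a candidate "struct { int len; int vec[2]; } s" the
   replacement variables would be given the pretty names

     s$len    s$vec$0    s$vec$1

   and for a candidate "_Complex double z",

     z$real   z$imag

   Unnamed bases fall back to the "D<uid>" form.  */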
1046 build_element_name (struct sra_elt *elt)
1048 build_element_name_1 (elt);
1049 obstack_1grow (&sra_obstack, '\0');
1050 return obstack_finish (&sra_obstack);
1053 /* Instantiate an element as an independent variable. */
1056 instantiate_element (struct sra_elt *elt)
1058 struct sra_elt *base_elt;
1061 for (base_elt = elt; base_elt->parent; base_elt = base_elt->parent)
1063 base = base_elt->element;
1065 elt->replacement = var = make_rename_temp (elt->type, "SR");
1066 DECL_SOURCE_LOCATION (var) = DECL_SOURCE_LOCATION (base);
1067 TREE_NO_WARNING (var) = TREE_NO_WARNING (base);
1068 DECL_ARTIFICIAL (var) = DECL_ARTIFICIAL (base);
1070 if (DECL_NAME (base) && !DECL_IGNORED_P (base))
1072 char *pretty_name = build_element_name (elt);
1073 DECL_NAME (var) = get_identifier (pretty_name);
1074 obstack_free (&sra_obstack, pretty_name);
1079 fputs (" ", dump_file);
1080 dump_sra_elt_name (dump_file, elt);
1081 fputs (" -> ", dump_file);
1082 print_generic_expr (dump_file, var, dump_flags);
1083 fputc ('\n', dump_file);
1087 /* Make one pass across an element tree deciding whether or not it's
1088 profitable to instantiate individual leaf scalars.
1090 PARENT_USES and PARENT_COPIES are the sum of the N_USES and N_COPIES
1091 fields all the way up the tree. */
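/* For example (numbers invented): suppose the whole of A must exist as a
   unit 3 times (say, it is passed to a function) and is copied once,
   while a.x is referenced 5 times and a.y only once.  At the leaves the
   test below evaluates as

     a.x:  5 + 0 + 1 > 3   -> instantiate a.x
     a.y:  1 + 0 + 1 > 3   -> leave a.y in the aggregate

   i.e. a leaf is replaced only when its own uses and copies outweigh the
   number of times the containing aggregate is needed as a whole.  */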
1094 decide_instantiation_1 (struct sra_elt *elt, unsigned int parent_uses,
1095 unsigned int parent_copies)
1097 if (dump_file && !elt->parent)
1099 fputs ("Initial instantiation for ", dump_file);
1100 dump_sra_elt_name (dump_file, elt);
1101 fputc ('\n', dump_file);
1104 if (elt->cannot_scalarize)
1109 /* The decision is simple: instantiate if we're used more frequently
1110 than the parent needs to be seen as a complete unit. */
1111 if (elt->n_uses + elt->n_copies + parent_copies > parent_uses)
1112 instantiate_element (elt);
1117 unsigned int this_uses = elt->n_uses + parent_uses;
1118 unsigned int this_copies = elt->n_copies + parent_copies;
1120 for (c = elt->children; c ; c = c->sibling)
1121 decide_instantiation_1 (c, this_uses, this_copies);
1125 /* Compute the size and number of all instantiated elements below ELT.
1126 We will only care about this if the size of the complete structure
1127 fits in a HOST_WIDE_INT, so we don't have to worry about overflow. */
1130 sum_instantiated_sizes (struct sra_elt *elt, unsigned HOST_WIDE_INT *sizep)
1132 if (elt->replacement)
1134 *sizep += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (elt->type));
1140 unsigned int count = 0;
1142 for (c = elt->children; c ; c = c->sibling)
1143 count += sum_instantiated_sizes (c, sizep);
1149 /* Instantiate fields in ELT->TYPE that are not currently present as children of ELT. */
1152 static void instantiate_missing_elements (struct sra_elt *elt);
1155 instantiate_missing_elements_1 (struct sra_elt *elt, tree child, tree type)
1157 struct sra_elt *sub = lookup_element (elt, child, type, INSERT);
1160 if (sub->replacement == NULL)
1161 instantiate_element (sub);
1164 instantiate_missing_elements (sub);
1168 instantiate_missing_elements (struct sra_elt *elt)
1170 tree type = elt->type;
1172 switch (TREE_CODE (type))
1177 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
1178 if (TREE_CODE (f) == FIELD_DECL)
1179 instantiate_missing_elements_1 (elt, f, TREE_TYPE (f));
1185 tree i, max, subtype;
1187 i = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1188 max = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
1189 subtype = TREE_TYPE (type);
1193 instantiate_missing_elements_1 (elt, i, subtype);
1194 if (tree_int_cst_equal (i, max))
1196 i = int_const_binop (PLUS_EXPR, i, integer_one_node, true);
1203 type = TREE_TYPE (type);
1204 instantiate_missing_elements_1 (elt, integer_zero_node, type);
1205 instantiate_missing_elements_1 (elt, integer_one_node, type);
1213 /* Make one pass across an element tree deciding whether to perform block
1214 or element copies. If we decide on element copies, instantiate all
1215 elements. Return true if there are any instantiated sub-elements. */
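/* For example (numbers invented, assuming a target where
   MOVE_RATIO * UNITS_PER_WORD comes to, say, 32 bytes): a 24-byte
   structure that is copied more often than it is used as a unit gets
   element copies outright.  A 32-byte structure in which replacements
   totalling 24 bytes have already been instantiated also gets element
   copies, since 24 * 4 >= 32 * 3.  A type whose remaining elements
   cannot all be instantiated is forced back to block copy.  */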
1218 decide_block_copy (struct sra_elt *elt)
1223 /* If scalarization is disabled, respect it. */
1224 if (elt->cannot_scalarize)
1226 elt->use_block_copy = 1;
1230 fputs ("Scalarization disabled for ", dump_file);
1231 dump_sra_elt_name (dump_file, elt);
1232 fputc ('\n', dump_file);
1238 /* Don't decide if we've no uses. */
1239 if (elt->n_uses == 0 && elt->n_copies == 0)
1242 else if (!elt->is_scalar)
1244 tree size_tree = TYPE_SIZE_UNIT (elt->type);
1245 bool use_block_copy = true;
1247 /* Don't bother trying to figure out the rest if the structure is
1248 so large we can't do easy arithmetic. This also forces block
1249 copies for variable sized structures. */
1250 if (host_integerp (size_tree, 1))
1252 unsigned HOST_WIDE_INT full_size, inst_size = 0;
1253 unsigned int inst_count;
1255 full_size = tree_low_cst (size_tree, 1);
1257 /* ??? What to do here. If there are two fields, and we've only
1258 instantiated one, then instantiating the other is clearly a win.
1259 If there are a large number of fields then the size of the copy
1260 is much more of a factor. */
1262 /* If the structure is small, and we've made copies, go ahead
1263 and instantiate, hoping that the copies will go away. */
1264 if (full_size <= (unsigned) MOVE_RATIO * UNITS_PER_WORD
1265 && elt->n_copies > elt->n_uses)
1266 use_block_copy = false;
1269 inst_count = sum_instantiated_sizes (elt, &inst_size);
1271 if (inst_size * 4 >= full_size * 3)
1272 use_block_copy = false;
1275 /* In order to avoid block copy, we have to be able to instantiate
1276 all elements of the type. See if this is possible. */
1278 && (!can_completely_scalarize_p (elt)
1279 || !type_can_instantiate_all_elements (elt->type)))
1280 use_block_copy = true;
1282 elt->use_block_copy = use_block_copy;
1286 fprintf (dump_file, "Using %s for ",
1287 use_block_copy ? "block-copy" : "element-copy");
1288 dump_sra_elt_name (dump_file, elt);
1289 fputc ('\n', dump_file);
1292 if (!use_block_copy)
1294 instantiate_missing_elements (elt);
1299 any_inst = elt->replacement != NULL;
1301 for (c = elt->children; c ; c = c->sibling)
1302 any_inst |= decide_block_copy (c);
1307 /* Entry point to phase 3. Instantiate scalar replacement variables. */
1310 decide_instantiations (void)
1314 struct bitmap_head_def done_head;
1316 /* We cannot clear bits from a bitmap we're iterating over,
1317 so save up all the bits to clear until the end. */
1318 bitmap_initialize (&done_head, 1);
1319 cleared_any = false;
1321 EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i,
1323 tree var = referenced_var (i);
1324 struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
1327 decide_instantiation_1 (elt, 0, 0);
1328 if (!decide_block_copy (elt))
1333 bitmap_set_bit (&done_head, i);
1340 bitmap_operation (sra_candidates, sra_candidates, &done_head,
1342 bitmap_operation (needs_copy_in, needs_copy_in, &done_head,
1345 bitmap_clear (&done_head);
1348 fputc ('\n', dump_file);
1352 /* Phase Four: Update the function to match the replacements created. */
1354 /* Mark all the variables in V_MAY_DEF or V_MUST_DEF operands for STMT for
1355 renaming. This becomes necessary when we modify all of a non-scalar. */
1358 mark_all_v_defs (tree stmt)
1360 v_may_def_optype v_may_defs;
1361 v_must_def_optype v_must_defs;
1364 get_stmt_operands (stmt);
1366 v_may_defs = V_MAY_DEF_OPS (stmt_ann (stmt));
1367 n = NUM_V_MAY_DEFS (v_may_defs);
1368 for (i = 0; i < n; i++)
1370 tree sym = V_MAY_DEF_RESULT (v_may_defs, i);
1371 if (TREE_CODE (sym) == SSA_NAME)
1372 sym = SSA_NAME_VAR (sym);
1373 bitmap_set_bit (vars_to_rename, var_ann (sym)->uid);
1376 v_must_defs = V_MUST_DEF_OPS (stmt_ann (stmt));
1377 n = NUM_V_MUST_DEFS (v_must_defs);
1378 for (i = 0; i < n; i++)
1380 tree sym = V_MUST_DEF_OP (v_must_defs, i);
1381 if (TREE_CODE (sym) == SSA_NAME)
1382 sym = SSA_NAME_VAR (sym);
1383 bitmap_set_bit (vars_to_rename, var_ann (sym)->uid);
1387 /* Build a single level component reference to ELT rooted at BASE. */
1390 generate_one_element_ref (struct sra_elt *elt, tree base)
1392 switch (TREE_CODE (TREE_TYPE (base)))
1395 return build (COMPONENT_REF, elt->type, base, elt->element, NULL);
1398 return build (ARRAY_REF, elt->type, base, elt->element, NULL, NULL);
1401 if (elt->element == integer_zero_node)
1402 return build (REALPART_EXPR, elt->type, base);
1404 return build (IMAGPART_EXPR, elt->type, base);
1411 /* Build a full component reference to ELT rooted at its native variable. */
1414 generate_element_ref (struct sra_elt *elt)
1417 return generate_one_element_ref (elt, generate_element_ref (elt->parent));
1419 return elt->element;
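/* For example, for the element chain a -> b -> 3 -> c this recursion
   produces

     COMPONENT_REF (ARRAY_REF (COMPONENT_REF (a, b), 3), c)

   i.e. the reference a.b[3].c rooted at the original VAR_DECL A.  */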
1422 /* Generate a set of assignment statements in *LIST_P to copy all
1423 instantiated elements under ELT to or from the equivalent structure
1424 rooted at EXPR. COPY_OUT controls the direction of the copy, with
1425 true meaning to copy out of EXPR into ELT. */
1428 generate_copy_inout (struct sra_elt *elt, bool copy_out, tree expr,
1434 if (elt->replacement)
1437 t = build (MODIFY_EXPR, void_type_node, elt->replacement, expr);
1439 t = build (MODIFY_EXPR, void_type_node, expr, elt->replacement);
1440 append_to_statement_list (t, list_p);
1444 for (c = elt->children; c ; c = c->sibling)
1446 t = generate_one_element_ref (c, unshare_expr (expr));
1447 generate_copy_inout (c, copy_out, t, list_p);
1452 /* Generate a set of assignment statements in *LIST_P to copy all instantiated
1453 elements under SRC to their counterparts under DST. There must be a 1-1
1454 correspondence of instantiated elements. */
1457 generate_element_copy (struct sra_elt *dst, struct sra_elt *src, tree *list_p)
1459 struct sra_elt *dc, *sc;
1461 for (dc = dst->children; dc ; dc = dc->sibling)
1463 sc = lookup_element (src, dc->element, NULL, NO_INSERT);
1466 generate_element_copy (dc, sc, list_p);
1469 if (dst->replacement)
1473 if (src->replacement == NULL)
1476 t = build (MODIFY_EXPR, void_type_node, dst->replacement,
1478 append_to_statement_list (t, list_p);
1482 /* Generate a set of assignment statements in *LIST_P to zero all instantiated
1483 elements under ELT. In addition, do not assign to elements that have been
1484 marked VISITED but do reset the visited flag; this allows easy coordination
1485 with generate_element_init. */
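/* For example, when scalarizing "a = { .x = 5 }" for a structure with
   members X and Y, generate_element_init emits "a$x = 5" and marks that
   element VISITED; the subsequent generate_element_zero pass then emits
   "a$y = 0" while skipping a$x (and clearing its flag again).  */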
1488 generate_element_zero (struct sra_elt *elt, tree *list_p)
1494 elt->visited = false;
1498 for (c = elt->children; c ; c = c->sibling)
1499 generate_element_zero (c, list_p);
1501 if (elt->replacement)
1506 t = fold_convert (elt->type, integer_zero_node);
1508 /* We generated a replacement for a non-scalar? */
1511 t = build (MODIFY_EXPR, void_type_node, elt->replacement, t);
1512 append_to_statement_list (t, list_p);
1516 /* Generate a set of assignment statements in *LIST_P to set all instantiated
1517 elements under ELT with the contents of the initializer INIT. In addition,
1518 mark all assigned elements VISITED; this allows easy coordination with
1519 generate_element_zero. Return false if we found a case we couldn't
1523 generate_element_init (struct sra_elt *elt, tree init, tree *list_p)
1526 enum tree_code init_code;
1527 struct sra_elt *sub;
1530 /* We can be passed DECL_INITIAL of a static variable. It might have a
1531 conversion, which we strip off here. */
1532 STRIP_USELESS_TYPE_CONVERSION (init);
1533 init_code = TREE_CODE (init);
1537 if (elt->replacement)
1539 t = build (MODIFY_EXPR, void_type_node, elt->replacement, init);
1540 append_to_statement_list (t, list_p);
1541 elt->visited = true;
1550 for (sub = elt->children; sub ; sub = sub->sibling)
1552 if (sub->element == integer_zero_node)
1553 t = (init_code == COMPLEX_EXPR
1554 ? TREE_OPERAND (init, 0) : TREE_REALPART (init));
1556 t = (init_code == COMPLEX_EXPR
1557 ? TREE_OPERAND (init, 1) : TREE_IMAGPART (init));
1558 result &= generate_element_init (sub, t, list_p);
1563 for (t = CONSTRUCTOR_ELTS (init); t ; t = TREE_CHAIN (t))
1565 sub = lookup_element (elt, TREE_PURPOSE (t), NULL, NO_INSERT);
1568 result &= generate_element_init (sub, TREE_VALUE (t), list_p);
1573 elt->visited = true;
1580 /* Insert STMT on all the outgoing edges out of BB. Note that if BB
1581 has more than one edge, STMT will be replicated for each edge. Also,
1582 abnormal edges will be ignored. */
1585 insert_edge_copies (tree stmt, basic_block bb)
1591 for (e = bb->succ; e; e = e->succ_next)
1593 /* We don't need to insert copies on abnormal edges. The
1594 value of the scalar replacement is not guaranteed to
1595 be valid through an abnormal edge. */
1596 if (!(e->flags & EDGE_ABNORMAL))
1600 bsi_insert_on_edge (e, stmt);
1604 bsi_insert_on_edge (e, lhd_unsave_expr_now (stmt));
1609 /* Helper function to insert LIST before BSI, and set up line number info. */
1612 sra_insert_before (block_stmt_iterator *bsi, tree list)
1614 tree stmt = bsi_stmt (*bsi);
1616 if (EXPR_HAS_LOCATION (stmt))
1617 annotate_all_with_locus (&list, EXPR_LOCATION (stmt));
1618 bsi_insert_before (bsi, list, BSI_SAME_STMT);
1621 /* Similarly, but insert after BSI. Handles insertion onto edges as well. */
1624 sra_insert_after (block_stmt_iterator *bsi, tree list)
1626 tree stmt = bsi_stmt (*bsi);
1628 if (EXPR_HAS_LOCATION (stmt))
1629 annotate_all_with_locus (&list, EXPR_LOCATION (stmt));
1631 if (stmt_ends_bb_p (stmt))
1632 insert_edge_copies (list, bsi->bb);
1634 bsi_insert_after (bsi, list, BSI_SAME_STMT);
1637 /* Similarly, but replace the statement at BSI. */
1640 sra_replace (block_stmt_iterator *bsi, tree list)
1642 sra_insert_before (bsi, list);
1644 if (bsi_end_p (*bsi))
1645 *bsi = bsi_last (bsi->bb);
1650 /* Scalarize a USE. To recap, this is either a simple reference to ELT,
1651 if ELT is scalar, or some occurrence of ELT that requires a complete
1652 aggregate. IS_OUTPUT is true if ELT is being modified. */
1655 scalarize_use (struct sra_elt *elt, tree *expr_p, block_stmt_iterator *bsi,
1658 tree list = NULL, stmt = bsi_stmt (*bsi);
1660 if (elt->replacement)
1662 /* If we have a replacement, then updating the reference is as
1663 simple as modifying the existing statement in place. */
1665 mark_all_v_defs (stmt);
1666 *expr_p = elt->replacement;
1671 /* Otherwise we need some copies. If ELT is being read, then we want
1672 to store all (modified) sub-elements back into the structure before
1673 the reference takes place. If ELT is being written, then we want to
1674 load the changed values back into our shadow variables. */
1675 /* ??? We don't check modified for reads, we just always write all of
1676 the values. We should be able to record the SSA number of the VOP
1677 for which the values were last read. If that number matches the
1678 SSA number of the VOP in the current statement, then we needn't
1679 emit an assignment. This would also eliminate double writes when
1680 a structure is passed as more than one argument to a function call.
1681 This optimization would be most effective if sra_walk_function
1682 processed the blocks in dominator order. */
1684 generate_copy_inout (elt, is_output, generate_element_ref (elt), &list);
1689 mark_all_v_defs (expr_first (list));
1690 sra_insert_after (bsi, list);
1693 sra_insert_before (bsi, list);
1697 /* Scalarize a COPY. To recap, this is an assignment statement between
1698 two scalarizable references, LHS_ELT and RHS_ELT. */
1701 scalarize_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
1702 block_stmt_iterator *bsi)
1706 if (lhs_elt->replacement && rhs_elt->replacement)
1708 /* If we have two scalar operands, modify the existing statement. */
1709 stmt = bsi_stmt (*bsi);
1711 #ifdef ENABLE_CHECKING
1712 /* See the commentary in sra_walk_function concerning
1713 RETURN_EXPR, and why we should never see one here. */
1714 if (TREE_CODE (stmt) != MODIFY_EXPR)
1718 TREE_OPERAND (stmt, 0) = lhs_elt->replacement;
1719 TREE_OPERAND (stmt, 1) = rhs_elt->replacement;
1722 else if (lhs_elt->use_block_copy || rhs_elt->use_block_copy)
1724 /* If either side requires a block copy, then sync the RHS back
1725 to the original structure, leave the original assignment
1726 statement (which will perform the block copy), then load the
1727 LHS values out of its now-updated original structure. */
1728 /* ??? Could perform a modified pair-wise element copy. That
1729 would at least allow those elements that are instantiated in
1730 both structures to be optimized well. */
1733 generate_copy_inout (rhs_elt, false,
1734 generate_element_ref (rhs_elt), &list);
1737 mark_all_v_defs (expr_first (list));
1738 sra_insert_before (bsi, list);
1742 generate_copy_inout (lhs_elt, true,
1743 generate_element_ref (lhs_elt), &list);
1745 sra_insert_after (bsi, list);
1749 /* Otherwise both sides must be fully instantiated. In which
1750 case perform pair-wise element assignments and replace the
1751 original block copy statement. */
1753 stmt = bsi_stmt (*bsi);
1754 mark_all_v_defs (stmt);
1757 generate_element_copy (lhs_elt, rhs_elt, &list);
1760 sra_replace (bsi, list);
1764 /* Scalarize an INIT. To recap, this is an assignment to a scalarizable
1765 reference from some form of constructor: CONSTRUCTOR, COMPLEX_CST or
1766 COMPLEX_EXPR. If RHS is NULL, it should be treated as an empty CONSTRUCTOR. */
1770 scalarize_init (struct sra_elt *lhs_elt, tree rhs, block_stmt_iterator *bsi)
1775 /* Generate initialization statements for all members extant in the RHS. */
1777 result = generate_element_init (lhs_elt, rhs, &list);
1779 /* CONSTRUCTOR is defined such that any member not mentioned is assigned
1780 a zero value. Initialize the rest of the instantiated elements. */
1781 generate_element_zero (lhs_elt, &list);
1785 /* If we failed to convert the entire initializer, then we must
1786 leave the structure assignment in place and must load values
1787 from the structure into the slots for which we did not find
1788 constants. The easiest way to do this is to generate a complete
1789 copy-out, and then follow that with the constant assignments
1790 that we were able to build. DCE will clean things up. */
1792 generate_copy_inout (lhs_elt, true, generate_element_ref (lhs_elt),
1794 append_to_statement_list (list, &list0);
1798 if (lhs_elt->use_block_copy || !result)
1800 /* Since LHS is not fully instantiated, we must leave the structure
1801 assignment in place. Treating this case differently from a USE
1802 exposes constants to later optimizations. */
1805 mark_all_v_defs (expr_first (list));
1806 sra_insert_after (bsi, list);
1811 /* The LHS is fully instantiated. The list of initializations
1812 replaces the original structure assignment. */
1815 mark_all_v_defs (bsi_stmt (*bsi));
1816 sra_replace (bsi, list);
1820 /* A subroutine of scalarize_ldst called via walk_tree. Set TREE_THIS_NOTRAP
1821 on all INDIRECT_REFs. */
1824 mark_notrap (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1828 if (TREE_CODE (t) == INDIRECT_REF)
1830 TREE_THIS_NOTRAP (t) = 1;
1833 else if (DECL_P (t) || TYPE_P (t))
1839 /* Scalarize a LDST. To recap, this is an assignment between one scalarizable
1840 reference ELT and one non-scalarizable reference OTHER. IS_OUTPUT is true
1841 if ELT is on the left-hand side. */
1844 scalarize_ldst (struct sra_elt *elt, tree other,
1845 block_stmt_iterator *bsi, bool is_output)
1847 /* Shouldn't have gotten called for a scalar. */
1848 if (elt->replacement)
1851 if (elt->use_block_copy)
1853 /* Since ELT is not fully instantiated, we have to leave the
1854 block copy in place. Treat this as a USE. */
1855 scalarize_use (elt, NULL, bsi, is_output);
1859 /* The interesting case is when ELT is fully instantiated. In this
1860 case we can have each element stored/loaded directly to/from the
1861 corresponding slot in OTHER. This avoids a block copy. */
1863 tree list = NULL, stmt = bsi_stmt (*bsi);
1865 mark_all_v_defs (stmt);
1866 generate_copy_inout (elt, is_output, other, &list);
1870 /* Preserve EH semantics. */
1871 if (stmt_ends_bb_p (stmt))
1873 tree_stmt_iterator tsi;
1876 /* Extract the first statement from LIST. */
1877 tsi = tsi_start (list);
1878 first = tsi_stmt (tsi);
1881 /* Replace the old statement with this new representative. */
1882 bsi_replace (bsi, first, true);
1884 if (!tsi_end_p (tsi))
1886 /* If any reference would trap, then they all would. And more
1887 to the point, the first would. Therefore none of the rest
1888 will trap since the first didn't. Indicate this by
1889 iterating over the remaining statements and setting
1890 TREE_THIS_NOTRAP in all INDIRECT_REFs. */
1893 walk_tree (tsi_stmt_ptr (tsi), mark_notrap, NULL, NULL);
1896 while (!tsi_end_p (tsi));
1898 insert_edge_copies (list, bsi->bb);
1902 sra_replace (bsi, list);
1906 /* Generate initializations for all scalarizable parameters. */
1909 scalarize_parms (void)
1914 EXECUTE_IF_SET_IN_BITMAP (needs_copy_in, 0, i,
1916 tree var = referenced_var (i);
1917 struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
1918 generate_copy_inout (elt, true, var, &list);
1922 insert_edge_copies (list, ENTRY_BLOCK_PTR);
1925 /* Entry point to phase 4. Update the function to match replacements. */
1928 scalarize_function (void)
1930 static const struct sra_walk_fns fns = {
1931 scalarize_use, scalarize_copy, scalarize_init, scalarize_ldst, false
1934 sra_walk_function (&fns);
1936 bsi_commit_edge_inserts (NULL);
1940 /* Debug helper function. Print ELT in a nice human-readable format. */
1943 dump_sra_elt_name (FILE *f, struct sra_elt *elt)
1945 if (elt->parent && TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
1947 fputs (elt->element == integer_zero_node ? "__real__ " : "__imag__ ", f);
1948 dump_sra_elt_name (f, elt->parent);
1953 dump_sra_elt_name (f, elt->parent);
1954 if (DECL_P (elt->element))
1956 if (TREE_CODE (elt->element) == FIELD_DECL)
1958 print_generic_expr (f, elt->element, dump_flags);
1961 fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC "]",
1962 TREE_INT_CST_LOW (elt->element));
1966 /* Likewise, but callable from the debugger. */
1969 debug_sra_elt_name (struct sra_elt *elt)
1971 dump_sra_elt_name (stderr, elt);
1972 fputc ('\n', stderr);
1975 /* Main entry point. */
1980 /* Initialize local variables. */
1981 gcc_obstack_init (&sra_obstack);
1982 sra_candidates = BITMAP_XMALLOC ();
1983 needs_copy_in = BITMAP_XMALLOC ();
1984 sra_type_decomp_cache = BITMAP_XMALLOC ();
1985 sra_type_inst_cache = BITMAP_XMALLOC ();
1986 sra_map = htab_create (101, sra_elt_hash, sra_elt_eq, NULL);
1988 /* Scan. If we find anything, instantiate and scalarize. */
1989 if (find_candidates_for_sra ())
1992 decide_instantiations ();
1993 scalarize_function ();
1996 /* Free allocated memory. */
1997 htab_delete (sra_map);
1999 BITMAP_XFREE (sra_candidates);
2000 BITMAP_XFREE (needs_copy_in);
2001 BITMAP_XFREE (sra_type_decomp_cache);
2002 BITMAP_XFREE (sra_type_inst_cache);
2003 obstack_free (&sra_obstack, NULL);
2009 return flag_tree_sra != 0;
2012 struct tree_opt_pass pass_sra =
2015 gate_sra, /* gate */
2016 tree_sra, /* execute */
2019 0, /* static_pass_number */
2020 TV_TREE_SRA, /* tv_id */
2021 PROP_cfg | PROP_ssa, /* properties_required */
2022 0, /* properties_provided */
2023 0, /* properties_destroyed */
2024 0, /* todo_flags_start */
2025 TODO_dump_func | TODO_rename_vars
2026 | TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */