1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2 references into scalar references, exposing them to the scalar optimizers.
4 Copyright (C) 2003, 2004 Free Software Foundation, Inc.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published by the
11 Free Software Foundation; either version 2, or (at your option) any later version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING. If not, write to the Free
21 Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
26 #include "coretypes.h"
32 /* These RTL headers are needed for basic-block.h. */
35 #include "hard-reg-set.h"
36 #include "basic-block.h"
37 #include "diagnostic.h"
38 #include "langhooks.h"
39 #include "tree-inline.h"
40 #include "tree-flow.h"
41 #include "tree-gimple.h"
42 #include "tree-dump.h"
43 #include "tree-pass.h"
49 /* expr.h is needed for MOVE_RATIO. */
53 /* The objective of this pass is to replace a non-addressable aggregate with a
54 set of independent variables. Most of the time, all of these variables
55 will be scalars. But a secondary objective is to break up larger
56 aggregates into smaller aggregates. In the process we may find that some
57 bits of the larger aggregate can be deleted as unreferenced.
59 This substitution is done globally. More localized substitutions would
60 be the purview of a load-store motion pass.
62 The optimization proceeds in phases:
64 (1) Identify variables that have types that are candidates for decomposition.
67 (2) Scan the function looking for the ways these variables are used.
68 In particular we're interested in the number of times a variable
69 (or member) is needed as a complete unit, and the number of times
70 a variable (or member) is copied.
72 (3) Based on the usage profile, instantiate substitution variables.
74 (4) Scan the function making replacements.
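   As an illustrative sketch (hypothetical code, not part of the original
   comment): given

	struct pair { int x; int y; } p;
	p.x = 3;
	p.y = 4;
	return p.x + p.y;

   the pass may replace P with two scalar temporaries, shown in dumps as
   p$x and p$y, after which the ordinary scalar optimizers (copy
   propagation, dead code elimination, ...) can finish the job.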
78 /* The set of aggregate variables that are candidates for scalarization. */
79 static bitmap sra_candidates;
81 /* Set of scalarizable PARM_DECLs that need copy-in operations at the
82 beginning of the function. */
83 static bitmap needs_copy_in;
85 /* Sets of bit pairs that cache type decomposition and instantiation. */
86 static bitmap sra_type_decomp_cache;
87 static bitmap sra_type_inst_cache;
89 /* One of these structures is created for each candidate aggregate
90 and each (accessed) member of such an aggregate. */
93 /* A tree of the elements. Used when we want to traverse everything. */
94 struct sra_elt *parent;
95 struct sra_elt *children;
96 struct sra_elt *sibling;
98 /* If this element is a root, then this is the VAR_DECL. If this is
99 a sub-element, this is some token used to identify the reference.
100 In the case of COMPONENT_REF, this is the FIELD_DECL. In the case
101 of an ARRAY_REF, this is the (constant) index. In the case of a
102 complex number, this is a zero or one. */
105 /* The type of the element. */
108 /* A VAR_DECL, for any sub-element we've decided to replace. */
111 /* The number of times the element is referenced as a whole. I.e.
112 given "a.b.c", this would be incremented for C, but not for A or B. */
115 /* The number of times the element is copied to or from another
116 scalarizable element. */
117 unsigned int n_copies;
119 /* True if TYPE is scalar. */
122 /* True if we saw something about this element that prevents scalarization,
123 such as non-constant indexing. */
124 bool cannot_scalarize;
126 /* True if we've decided that structure-to-structure assignment
127 should happen via memcpy and not per-element. */
130 /* A flag for use with/after random access traversals. */
134 /* Random access to the child of a parent is performed by hashing.
135 This prevents quadratic behaviour, and allows SRA to function
136 reasonably on larger records. */
137 static htab_t sra_map;
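/* Illustrative sketch (hypothetical example, not in the original
   source): for

     struct inner { float re; float im; };
     struct outer { struct inner c; int n; } v;

   the element tree rooted at the VAR_DECL V has a child for the
   FIELD_DECL C, which in turn has children for RE and IM, plus a
   sibling child for N.  Hashing (parent, element) pairs in SRA_MAP
   lets lookup_element find, say, the element for v.c.im without a
   linear walk over its siblings.  */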
139 /* All structures are allocated out of the following obstack. */
140 static struct obstack sra_obstack;
142 /* Debugging functions. */
143 static void dump_sra_elt_name (FILE *, struct sra_elt *);
144 extern void debug_sra_elt_name (struct sra_elt *);
147 /* Return true if DECL is an SRA candidate. */
150 is_sra_candidate_decl (tree decl)
152 return DECL_P (decl) && bitmap_bit_p (sra_candidates, var_ann (decl)->uid);
155 /* Return true if TYPE is a scalar type. */
158 is_sra_scalar_type (tree type)
160 enum tree_code code = TREE_CODE (type);
161 return (code == INTEGER_TYPE || code == REAL_TYPE || code == VECTOR_TYPE
162 || code == ENUMERAL_TYPE || code == BOOLEAN_TYPE
163 || code == CHAR_TYPE || code == POINTER_TYPE || code == OFFSET_TYPE
164 || code == REFERENCE_TYPE);
167 /* Return true if TYPE can be decomposed into a set of independent variables.
169 Note that this doesn't imply that all elements of TYPE can be
170 instantiated, just that if we decide to break up the type into
171 separate pieces, it can be done. */
174 type_can_be_decomposed_p (tree type)
176 unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
179 /* Avoid searching the same type twice. */
180 if (bitmap_bit_p (sra_type_decomp_cache, cache+0))
182 if (bitmap_bit_p (sra_type_decomp_cache, cache+1))
185 /* The type must have a definite non-zero size. */
186 if (TYPE_SIZE (type) == NULL || integer_zerop (TYPE_SIZE (type)))
189 /* The type must be a non-union aggregate. */
190 switch (TREE_CODE (type))
194 bool saw_one_field = false;
196 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
197 if (TREE_CODE (t) == FIELD_DECL)
199 /* Reject incorrectly represented bit fields. */
200 if (DECL_BIT_FIELD (t)
201 && (tree_low_cst (DECL_SIZE (t), 1)
202 != TYPE_PRECISION (TREE_TYPE (t))))
205 saw_one_field = true;
208 /* Record types must have at least one field. */
215 /* Array types must have a fixed lower and upper bound. */
216 t = TYPE_DOMAIN (type);
219 if (TYPE_MIN_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MIN_VALUE (t)))
221 if (TYPE_MAX_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MAX_VALUE (t)))
232 bitmap_set_bit (sra_type_decomp_cache, cache+0);
236 bitmap_set_bit (sra_type_decomp_cache, cache+1);
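/* For illustration (hypothetical examples, not in the original
   source): a RECORD_TYPE such as  struct { int a; char b[8]; }  can be
   decomposed; a union, a zero-sized structure, or an array with a
   non-constant bound such as  int x[n]  cannot, and the answer is
   remembered in sra_type_decomp_cache either way.  */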
240 /* Return true if DECL can be decomposed into a set of independent
241 (though not necessarily scalar) variables. */
244 decl_can_be_decomposed_p (tree var)
246 /* Early out for scalars. */
247 if (is_sra_scalar_type (TREE_TYPE (var)))
250 /* The variable must not be aliased. */
251 if (!is_gimple_non_addressable (var))
253 if (dump_file && (dump_flags & TDF_DETAILS))
255 fprintf (dump_file, "Cannot scalarize variable ");
256 print_generic_expr (dump_file, var, dump_flags);
257 fprintf (dump_file, " because it must live in memory\n");
262 /* The variable must not be volatile. */
263 if (TREE_THIS_VOLATILE (var))
265 if (dump_file && (dump_flags & TDF_DETAILS))
267 fprintf (dump_file, "Cannot scalarize variable ");
268 print_generic_expr (dump_file, var, dump_flags);
269 fprintf (dump_file, " because it is declared volatile\n");
274 /* We must be able to decompose the variable's type. */
275 if (!type_can_be_decomposed_p (TREE_TYPE (var)))
277 if (dump_file && (dump_flags & TDF_DETAILS))
279 fprintf (dump_file, "Cannot scalarize variable ");
280 print_generic_expr (dump_file, var, dump_flags);
281 fprintf (dump_file, " because its type cannot be decomposed\n");
289 /* Return true if TYPE can be *completely* decomposed into scalars. */
292 type_can_instantiate_all_elements (tree type)
294 if (is_sra_scalar_type (type))
296 if (!type_can_be_decomposed_p (type))
299 switch (TREE_CODE (type))
303 unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
306 if (bitmap_bit_p (sra_type_inst_cache, cache+0))
308 if (bitmap_bit_p (sra_type_inst_cache, cache+1))
311 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
312 if (TREE_CODE (f) == FIELD_DECL)
314 if (!type_can_instantiate_all_elements (TREE_TYPE (f)))
316 bitmap_set_bit (sra_type_inst_cache, cache+1);
321 bitmap_set_bit (sra_type_inst_cache, cache+0);
326 return type_can_instantiate_all_elements (TREE_TYPE (type));
336 /* Test whether ELT or some sub-element cannot be scalarized. */
339 can_completely_scalarize_p (struct sra_elt *elt)
343 if (elt->cannot_scalarize)
346 for (c = elt->children; c ; c = c->sibling)
347 if (!can_completely_scalarize_p (c))
354 /* A simplified tree hashing algorithm that only handles the types of
355 trees we expect to find in sra_elt->element. */
358 sra_hash_tree (tree t)
360 switch (TREE_CODE (t))
368 return TREE_INT_CST_LOW (t) ^ TREE_INT_CST_HIGH (t);
374 /* Hash function for type SRA_PAIR. */
377 sra_elt_hash (const void *x)
379 const struct sra_elt *e = x;
380 const struct sra_elt *p;
383 h = sra_hash_tree (e->element);
385 /* Take into account everything back up the chain. Given that chain
386 lengths are rarely very long, this should be acceptable. If we
387 truly identify this as a performance problem, it should work to
388 hash the pointer value "e->parent". */
389 for (p = e->parent; p ; p = p->parent)
390 h = (h * 65521) ^ sra_hash_tree (p->element);
395 /* Equality function for type SRA_PAIR. */
398 sra_elt_eq (const void *x, const void *y)
400 const struct sra_elt *a = x;
401 const struct sra_elt *b = y;
403 if (a->parent != b->parent)
406 /* All the field/decl stuff is unique. */
407 if (a->element == b->element)
410 /* The only thing left is integer equality. */
411 if (TREE_CODE (a->element) == INTEGER_CST
412 && TREE_CODE (b->element) == INTEGER_CST)
413 return tree_int_cst_equal (a->element, b->element);
418 /* Create or return the SRA_ELT structure for CHILD in PARENT. PARENT
419 may be null, in which case CHILD must be a DECL. */
421 static struct sra_elt *
422 lookup_element (struct sra_elt *parent, tree child, tree type,
423 enum insert_option insert)
425 struct sra_elt dummy;
426 struct sra_elt **slot;
429 dummy.parent = parent;
430 dummy.element = child;
432 slot = (struct sra_elt **) htab_find_slot (sra_map, &dummy, insert);
433 if (!slot && insert == NO_INSERT)
437 if (!elt && insert == INSERT)
439 *slot = elt = obstack_alloc (&sra_obstack, sizeof (*elt));
440 memset (elt, 0, sizeof (*elt));
442 elt->parent = parent;
443 elt->element = child;
445 elt->is_scalar = is_sra_scalar_type (type);
449 elt->sibling = parent->children;
450 parent->children = elt;
453 /* If this is a parameter, then if we want to scalarize, we have
454 one copy from the true function parameter. Count it now. */
455 if (TREE_CODE (child) == PARM_DECL)
458 bitmap_set_bit (needs_copy_in, var_ann (child)->uid);
465 /* Return true if the ARRAY_REF in EXPR is a constant, in-bounds access. */
468 is_valid_const_index (tree expr)
470 tree dom, t, index = TREE_OPERAND (expr, 1);
472 if (TREE_CODE (index) != INTEGER_CST)
475 /* Watch out for stupid user tricks, indexing outside the array.
477 Careful, we're not called only on scalarizable types, so do not
478 assume constant array bounds. We needn't do anything with such
479 cases, since they'll be referring to objects that we should have
480 already rejected for scalarization, so returning false is fine. */
482 dom = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (expr, 0)));
486 t = TYPE_MIN_VALUE (dom);
487 if (!t || TREE_CODE (t) != INTEGER_CST)
489 if (tree_int_cst_lt (index, t))
492 t = TYPE_MAX_VALUE (dom);
493 if (!t || TREE_CODE (t) != INTEGER_CST)
495 if (tree_int_cst_lt (t, index))
501 /* Create or return the SRA_ELT structure for EXPR if the expression
502 refers to a scalarizable variable. */
504 static struct sra_elt *
505 maybe_lookup_element_for_expr (tree expr)
510 switch (TREE_CODE (expr))
515 if (is_sra_candidate_decl (expr))
516 return lookup_element (NULL, expr, TREE_TYPE (expr), INSERT);
520 /* We can't scalarize variable array indices. */
521 if (is_valid_const_index (expr))
522 child = TREE_OPERAND (expr, 1);
528 /* Don't look through unions. */
529 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) != RECORD_TYPE)
531 child = TREE_OPERAND (expr, 1);
535 child = integer_zero_node;
538 child = integer_one_node;
545 elt = maybe_lookup_element_for_expr (TREE_OPERAND (expr, 0));
547 return lookup_element (elt, child, TREE_TYPE (expr), INSERT);
552 /* Functions to walk just enough of the tree to see all scalarizable
553 references, and categorize them. */
555 /* A set of callbacks for phases 2 and 4. They'll be invoked for the
556 various kinds of references seen. In all cases, *BSI is an iterator
557 pointing to the statement being processed. */
560 /* Invoked when ELT is required as a unit. Note that ELT might refer to
561 a leaf node, in which case this is a simple scalar reference. *EXPR_P
562 points to the location of the expression. IS_OUTPUT is true if this
563 is a left-hand-side reference. */
564 void (*use) (struct sra_elt *elt, tree *expr_p,
565 block_stmt_iterator *bsi, bool is_output);
567 /* Invoked when we have a copy between two scalarizable references. */
568 void (*copy) (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
569 block_stmt_iterator *bsi);
571 /* Invoked when ELT is initialized from a constant. VALUE may be NULL,
572 in which case it should be treated as an empty CONSTRUCTOR. */
573 void (*init) (struct sra_elt *elt, tree value, block_stmt_iterator *bsi);
575 /* Invoked when we have a copy between one scalarizable reference ELT
576 and one non-scalarizable reference OTHER. IS_OUTPUT is true if ELT
577 is on the left-hand side. */
578 void (*ldst) (struct sra_elt *elt, tree other,
579 block_stmt_iterator *bsi, bool is_output);
581 /* True during phase 2, false during phase 4. */
582 /* ??? This is a hack. */
586 #ifdef ENABLE_CHECKING
587 /* Invoked via walk_tree. If *TP contains a candidate decl, return it. */
590 sra_find_candidate_decl (tree *tp, int *walk_subtrees,
591 void *data ATTRIBUTE_UNUSED)
594 enum tree_code code = TREE_CODE (t);
596 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
599 if (is_sra_candidate_decl (t))
609 /* Walk most expressions looking for a scalarizable aggregate.
610 If we find one, invoke FNS->USE. */
613 sra_walk_expr (tree *expr_p, block_stmt_iterator *bsi, bool is_output,
614 const struct sra_walk_fns *fns)
618 bool disable_scalarization = false;
620 /* We're looking to collect a reference expression between EXPR and INNER,
621 such that INNER is a scalarizable decl and all other nodes through EXPR
622 are references that we can scalarize. If we come across something that
623 we can't scalarize, we reset EXPR. This has the effect of making it
624 appear that we're referring to the larger expression as a whole. */
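  /* Hypothetical example (not in the original source): for  a.b[2].c
     we start with EXPR at the outermost COMPONENT_REF and walk INNER
     inward through the ARRAY_REF and COMPONENT_REF until we reach the
     VAR_DECL A, then invoke FNS->USE on that element.  For something
     like a BIT_FIELD_REF of a.b, EXPR is instead reset to a.b so that
     the reference is recorded against that whole sub-object.  */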
627 switch (TREE_CODE (inner))
632 /* If there is a scalarizable decl at the bottom, then process it. */
633 if (is_sra_candidate_decl (inner))
635 struct sra_elt *elt = maybe_lookup_element_for_expr (expr);
636 if (disable_scalarization)
637 elt->cannot_scalarize = true;
639 fns->use (elt, expr_p, bsi, is_output);
644 /* Non-constant index means any member may be accessed. Prevent the
645 expression from being scalarized. If we were to treat this as a
646 reference to the whole array, we can wind up with a single dynamic
647 index reference inside a loop being overridden by several constant
648 index references during loop setup. It's possible that this could
649 be avoided by using dynamic usage counts based on BB trip counts
650 (based on loop analysis or profiling), but that hardly seems worth the effort. */
652 /* ??? Hack. Figure out how to push this into the scan routines
653 without duplicating too much code. */
654 if (!is_valid_const_index (inner))
656 disable_scalarization = true;
659 /* ??? Are we assured that non-constant bounds and stride will have
660 the same value everywhere? I don't think Fortran will... */
661 if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
663 inner = TREE_OPERAND (inner, 0);
667 /* A reference to a union member constitutes a reference to the entire union. */
669 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (inner, 0))) != RECORD_TYPE)
671 /* ??? See above re non-constant stride. */
672 if (TREE_OPERAND (inner, 2))
674 inner = TREE_OPERAND (inner, 0);
679 inner = TREE_OPERAND (inner, 0);
683 /* A bit field reference (access to *multiple* fields simultaneously)
684 is not currently scalarized. Consider this an access to the
685 complete outer element, to which walk_tree will bring us next. */
688 case ARRAY_RANGE_REF:
689 /* Similarly, a subrange reference is used to modify indexing, which
690 means that the canonical element names that we have won't work. */
693 case VIEW_CONVERT_EXPR:
695 /* Similarly, a view/nop explicitly wants to look at an object in a
696 type other than the one we've scalarized. */
700 expr_p = &TREE_OPERAND (inner, 0);
701 inner = expr = *expr_p;
705 #ifdef ENABLE_CHECKING
706 /* Validate that we're not missing any references. */
707 if (walk_tree (&inner, sra_find_candidate_decl, NULL, NULL))
714 /* Walk a TREE_LIST of values looking for scalarizable aggregates.
715 If we find one, invoke FNS->USE. */
718 sra_walk_tree_list (tree list, block_stmt_iterator *bsi, bool is_output,
719 const struct sra_walk_fns *fns)
722 for (op = list; op ; op = TREE_CHAIN (op))
723 sra_walk_expr (&TREE_VALUE (op), bsi, is_output, fns);
726 /* Walk the arguments of a CALL_EXPR looking for scalarizable aggregates.
727 If we find one, invoke FNS->USE. */
730 sra_walk_call_expr (tree expr, block_stmt_iterator *bsi,
731 const struct sra_walk_fns *fns)
733 sra_walk_tree_list (TREE_OPERAND (expr, 1), bsi, false, fns);
736 /* Walk the inputs and outputs of an ASM_EXPR looking for scalarizable
737 aggregates. If we find one, invoke FNS->USE. */
740 sra_walk_asm_expr (tree expr, block_stmt_iterator *bsi,
741 const struct sra_walk_fns *fns)
743 sra_walk_tree_list (ASM_INPUTS (expr), bsi, false, fns);
744 sra_walk_tree_list (ASM_OUTPUTS (expr), bsi, true, fns);
747 /* Walk a MODIFY_EXPR and categorize the assignment appropriately. */
750 sra_walk_modify_expr (tree expr, block_stmt_iterator *bsi,
751 const struct sra_walk_fns *fns)
753 struct sra_elt *lhs_elt, *rhs_elt;
756 lhs = TREE_OPERAND (expr, 0);
757 rhs = TREE_OPERAND (expr, 1);
758 lhs_elt = maybe_lookup_element_for_expr (lhs);
759 rhs_elt = maybe_lookup_element_for_expr (rhs);
761 /* If both sides are scalarizable, this is a COPY operation. */
762 if (lhs_elt && rhs_elt)
764 fns->copy (lhs_elt, rhs_elt, bsi);
770 /* If this is an assignment from a constant, or constructor, then
771 we have access to all of the elements individually. Invoke INIT. */
772 if (TREE_CODE (rhs) == COMPLEX_EXPR
773 || TREE_CODE (rhs) == COMPLEX_CST
774 || TREE_CODE (rhs) == CONSTRUCTOR)
775 fns->init (lhs_elt, rhs, bsi);
777 /* If this is an assignment from read-only memory, treat this as if
778 we'd been passed the constructor directly. Invoke INIT. */
779 else if (TREE_CODE (rhs) == VAR_DECL
781 && TREE_READONLY (rhs)
782 && targetm.binds_local_p (rhs))
784 if (DECL_INITIAL (rhs) != error_mark_node)
785 fns->init (lhs_elt, DECL_INITIAL (rhs), bsi);
788 /* If this is a copy from a non-scalarizable lvalue, invoke LDST.
789 The lvalue requirement prevents us from trying to directly scalarize
790 the result of a function call. Which would result in trying to call
791 the function multiple times, and other evil things. */
792 else if (!lhs_elt->is_scalar && is_gimple_addr_expr_arg (rhs))
793 fns->ldst (lhs_elt, rhs, bsi, true);
795 /* Otherwise we're being used in some context that requires the
796 aggregate to be seen as a whole. Invoke USE. */
798 fns->use (lhs_elt, &TREE_OPERAND (expr, 0), bsi, true);
802 /* LHS_ELT being null only means that the LHS as a whole is not a
803 scalarizable reference. There may be occurrences of scalarizable
804 variables within, which implies a USE. */
805 sra_walk_expr (&TREE_OPERAND (expr, 0), bsi, true, fns);
808 /* Likewise for the right-hand side. The only difference here is that
809 we don't have to handle constants, and the RHS may be a call. */
812 if (!rhs_elt->is_scalar)
813 fns->ldst (rhs_elt, lhs, bsi, false);
815 fns->use (rhs_elt, &TREE_OPERAND (expr, 1), bsi, false);
817 else if (TREE_CODE (rhs) == CALL_EXPR)
818 sra_walk_call_expr (rhs, bsi, fns);
820 sra_walk_expr (&TREE_OPERAND (expr, 1), bsi, false, fns);
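/* Illustrative dispatch (hypothetical, not in the original source) for
   the categorization done above, assuming A and B are scalarizable
   aggregates, A.F is a scalar member, and *P is not scalarizable:

     a = b;        both sides scalarizable    -> fns->copy
     a = {...};    constructor/constant RHS   -> fns->init
     a = *p;       non-scalarizable RHS       -> fns->ldst, is_output true
     *p = a;       non-scalarizable LHS       -> fns->ldst, is_output false
     a.f = x + y;  element used directly      -> fns->use

   During phase 2 these callbacks merely bump counters (scan_*); during
   phase 4 they rewrite the statements (scalarize_*).  */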
823 /* Entry point to the walk functions. Search the entire function,
824 invoking the callbacks in FNS on each of the references to
825 scalarizable variables. */
828 sra_walk_function (const struct sra_walk_fns *fns)
831 block_stmt_iterator si;
833 /* ??? Phase 4 could derive some benefit from walking the function in
834 dominator tree order. */
837 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
842 stmt = bsi_stmt (si);
843 ann = stmt_ann (stmt);
845 /* If the statement has no virtual operands, then it doesn't
846 make any structure references that we care about. */
847 if (NUM_V_MAY_DEFS (V_MAY_DEF_OPS (ann)) == 0
848 && NUM_VUSES (VUSE_OPS (ann)) == 0
849 && NUM_V_MUST_DEFS (V_MUST_DEF_OPS (ann)) == 0)
852 switch (TREE_CODE (stmt))
855 /* If we have "return <retval>" then the return value is
856 already exposed for our pleasure. Walk it as a USE to
857 force all the components back in place for the return.
859 If we have an embedded assignment, then <retval> is of
860 a type that gets returned in registers in this ABI, and
861 we do not wish to extend their lifetimes. Treat this
862 as a USE of the variable on the RHS of this assignment. */
864 t = TREE_OPERAND (stmt, 0);
865 if (TREE_CODE (t) == MODIFY_EXPR)
866 sra_walk_expr (&TREE_OPERAND (t, 1), &si, false, fns);
868 sra_walk_expr (&TREE_OPERAND (stmt, 0), &si, false, fns);
872 sra_walk_modify_expr (stmt, &si, fns);
875 sra_walk_call_expr (stmt, &si, fns);
878 sra_walk_asm_expr (stmt, &si, fns);
887 /* Phase One: Scan all referenced variables in the program looking for
888 structures that could be decomposed. */
891 find_candidates_for_sra (void)
894 bool any_set = false;
896 for (i = 0; i < num_referenced_vars; i++)
898 tree var = referenced_var (i);
899 if (decl_can_be_decomposed_p (var))
901 bitmap_set_bit (sra_candidates, var_ann (var)->uid);
910 /* Phase Two: Scan all references to scalarizable variables. Count the
911 number of times they are used or copied respectively. */
913 /* Callbacks to fill in SRA_WALK_FNS. Everything but USE is
914 considered a copy, because we can decompose the reference such that
915 the sub-elements needn't be contiguous. */
918 scan_use (struct sra_elt *elt, tree *expr_p ATTRIBUTE_UNUSED,
919 block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
920 bool is_output ATTRIBUTE_UNUSED)
926 scan_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
927 block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
929 lhs_elt->n_copies += 1;
930 rhs_elt->n_copies += 1;
934 scan_init (struct sra_elt *lhs_elt, tree rhs ATTRIBUTE_UNUSED,
935 block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
937 lhs_elt->n_copies += 1;
941 scan_ldst (struct sra_elt *elt, tree other ATTRIBUTE_UNUSED,
942 block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
943 bool is_output ATTRIBUTE_UNUSED)
948 /* Dump the values we collected during the scanning phase. */
951 scan_dump (struct sra_elt *elt)
955 dump_sra_elt_name (dump_file, elt);
956 fprintf (dump_file, ": n_uses=%u n_copies=%u\n", elt->n_uses, elt->n_copies);
958 for (c = elt->children; c ; c = c->sibling)
962 /* Entry point to phase 2. Scan the entire function, building up
963 scalarization data structures, recording copies and uses. */
968 static const struct sra_walk_fns fns = {
969 scan_use, scan_copy, scan_init, scan_ldst, true
972 sra_walk_function (&fns);
974 if (dump_file && (dump_flags & TDF_DETAILS))
978 fputs ("\nScan results:\n", dump_file);
979 EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i,
981 tree var = referenced_var (i);
982 struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
986 fputc ('\n', dump_file);
990 /* Phase Three: Make decisions about which variables to scalarize, if any.
991 All elements to be scalarized have replacement variables made for them. */
993 /* A subroutine of build_element_name. Recursively build the element
994 name on the obstack. */
997 build_element_name_1 (struct sra_elt *elt)
1004 build_element_name_1 (elt->parent);
1005 obstack_1grow (&sra_obstack, '$');
1007 if (TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
1009 if (elt->element == integer_zero_node)
1010 obstack_grow (&sra_obstack, "real", 4);
1012 obstack_grow (&sra_obstack, "imag", 4);
1018 if (TREE_CODE (t) == INTEGER_CST)
1020 /* ??? Eh. Don't bother doing double-wide printing. */
1021 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (t));
1022 obstack_grow (&sra_obstack, buffer, strlen (buffer));
1026 tree name = DECL_NAME (t);
1028 obstack_grow (&sra_obstack, IDENTIFIER_POINTER (name),
1029 IDENTIFIER_LENGTH (name));
1032 sprintf (buffer, "D%u", DECL_UID (t));
1033 obstack_grow (&sra_obstack, buffer, strlen (buffer));
1038 /* Construct a pretty variable name for an element's replacement variable.
1039 The name is built on the obstack. */
1042 build_element_name (struct sra_elt *elt)
1044 build_element_name_1 (elt);
1045 obstack_1grow (&sra_obstack, '\0');
1046 return obstack_finish (&sra_obstack);
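/* Hypothetical example (not from the original source): for an element
   reached through  parm.field[3]  where FIELD has complex type, the
   imaginary half would be named  "parm$field$3$imag", and that string
   becomes the DECL_NAME of the replacement variable created below.  */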
1049 /* Instantiate an element as an independent variable. */
1052 instantiate_element (struct sra_elt *elt)
1054 struct sra_elt *base_elt;
1057 for (base_elt = elt; base_elt->parent; base_elt = base_elt->parent)
1059 base = base_elt->element;
1061 elt->replacement = var = make_rename_temp (elt->type, "SR");
1062 DECL_SOURCE_LOCATION (var) = DECL_SOURCE_LOCATION (base);
1063 TREE_NO_WARNING (var) = TREE_NO_WARNING (base);
1064 DECL_ARTIFICIAL (var) = DECL_ARTIFICIAL (base);
1066 if (DECL_NAME (base) && !DECL_IGNORED_P (base))
1068 char *pretty_name = build_element_name (elt);
1069 DECL_NAME (var) = get_identifier (pretty_name);
1070 obstack_free (&sra_obstack, pretty_name);
1075 fputs (" ", dump_file);
1076 dump_sra_elt_name (dump_file, elt);
1077 fputs (" -> ", dump_file);
1078 print_generic_expr (dump_file, var, dump_flags);
1079 fputc ('\n', dump_file);
1083 /* Make one pass across an element tree deciding whether or not it's
1084 profitable to instantiate individual leaf scalars.
1086 PARENT_USES and PARENT_COPIES are the sum of the N_USES and N_COPIES
1087 fields all the way up the tree. */
1090 decide_instantiation_1 (struct sra_elt *elt, unsigned int parent_uses,
1091 unsigned int parent_copies)
1093 if (dump_file && !elt->parent)
1095 fputs ("Initial instantiation for ", dump_file);
1096 dump_sra_elt_name (dump_file, elt);
1097 fputc ('\n', dump_file);
1100 if (elt->cannot_scalarize)
1105 /* The decision is simple: instantiate if we're used more frequently
1106 than the parent needs to be seen as a complete unit. */
1107 if (elt->n_uses + elt->n_copies + parent_copies > parent_uses)
1108 instantiate_element (elt);
1113 unsigned int this_uses = elt->n_uses + parent_uses;
1114 unsigned int this_copies = elt->n_copies + parent_copies;
1116 for (c = elt->children; c ; c = c->sibling)
1117 decide_instantiation_1 (c, this_uses, this_copies);
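/* Worked example (hypothetical numbers, not from the original source):
   a field read 5 times on its own (n_uses = 5) and copied twice
   (n_copies = 2), whose parent aggregate is used whole 3 times
   (parent_uses = 3, parent_copies = 0), satisfies 5 + 2 + 0 > 3 and
   therefore gets its own replacement variable.  */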
1121 /* Compute the size and number of all instantiated elements below ELT.
1122 We will only care about this if the size of the complete structure
1123 fits in a HOST_WIDE_INT, so we don't have to worry about overflow. */
1126 sum_instantiated_sizes (struct sra_elt *elt, unsigned HOST_WIDE_INT *sizep)
1128 if (elt->replacement)
1130 *sizep += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (elt->type));
1136 unsigned int count = 0;
1138 for (c = elt->children; c ; c = c->sibling)
1139 count += sum_instantiated_sizes (c, sizep);
1145 /* Instantiate fields in ELT->TYPE that are not currently present as children of ELT. */
1148 static void instantiate_missing_elements (struct sra_elt *elt);
1151 instantiate_missing_elements_1 (struct sra_elt *elt, tree child, tree type)
1153 struct sra_elt *sub = lookup_element (elt, child, type, INSERT);
1156 if (sub->replacement == NULL)
1157 instantiate_element (sub);
1160 instantiate_missing_elements (sub);
1164 instantiate_missing_elements (struct sra_elt *elt)
1166 tree type = elt->type;
1168 switch (TREE_CODE (type))
1173 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
1174 if (TREE_CODE (f) == FIELD_DECL)
1175 instantiate_missing_elements_1 (elt, f, TREE_TYPE (f));
1181 tree i, max, subtype;
1183 i = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1184 max = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
1185 subtype = TREE_TYPE (type);
1189 instantiate_missing_elements_1 (elt, i, subtype);
1190 if (tree_int_cst_equal (i, max))
1192 i = int_const_binop (PLUS_EXPR, i, integer_one_node, true);
1199 type = TREE_TYPE (type);
1200 instantiate_missing_elements_1 (elt, integer_zero_node, type);
1201 instantiate_missing_elements_1 (elt, integer_one_node, type);
1209 /* Make one pass across an element tree deciding whether to perform block
1210 or element copies. If we decide on element copies, instantiate all
1211 elements. Return true if there are any instantiated sub-elements. */
1214 decide_block_copy (struct sra_elt *elt)
1219 /* If scalarization is disabled, respect it. */
1220 if (elt->cannot_scalarize)
1222 elt->use_block_copy = 1;
1226 fputs ("Scalarization disabled for ", dump_file);
1227 dump_sra_elt_name (dump_file, elt);
1228 fputc ('\n', dump_file);
1234 /* Don't decide if we've no uses. */
1235 if (elt->n_uses == 0 && elt->n_copies == 0)
1238 else if (!elt->is_scalar)
1240 tree size_tree = TYPE_SIZE_UNIT (elt->type);
1241 bool use_block_copy = true;
1243 /* Don't bother trying to figure out the rest if the structure is
1244 so large we can't do easy arithmetic. This also forces block
1245 copies for variable sized structures. */
1246 if (host_integerp (size_tree, 1))
1248 unsigned HOST_WIDE_INT full_size, inst_size = 0;
1249 unsigned int inst_count;
1251 full_size = tree_low_cst (size_tree, 1);
1253 /* ??? What to do here. If there are two fields, and we've only
1254 instantiated one, then instantiating the other is clearly a win.
1255 If there are a large number of fields then the size of the copy
1256 is much more of a factor. */
1258 /* If the structure is small, and we've made copies, go ahead
1259 and instantiate, hoping that the copies will go away. */
1260 if (full_size <= (unsigned) MOVE_RATIO * UNITS_PER_WORD
1261 && elt->n_copies > elt->n_uses)
1262 use_block_copy = false;
1265 inst_count = sum_instantiated_sizes (elt, &inst_size);
1267 if (inst_size * 4 >= full_size * 3)
1268 use_block_copy = false;
1271 /* In order to avoid block copy, we have to be able to instantiate
1272 all elements of the type. See if this is possible. */
1274 && (!can_completely_scalarize_p (elt)
1275 || !type_can_instantiate_all_elements (elt->type)))
1276 use_block_copy = true;
1278 elt->use_block_copy = use_block_copy;
1282 fprintf (dump_file, "Using %s for ",
1283 use_block_copy ? "block-copy" : "element-copy");
1284 dump_sra_elt_name (dump_file, elt);
1285 fputc ('\n', dump_file);
1288 if (!use_block_copy)
1290 instantiate_missing_elements (elt);
1295 any_inst = elt->replacement != NULL;
1297 for (c = elt->children; c ; c = c->sibling)
1298 any_inst |= decide_block_copy (c);
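/* Worked example (hypothetical numbers, not from the original source):
   with MOVE_RATIO * UNITS_PER_WORD = 32, a 24-byte structure whose
   copies outnumber its whole-object uses is fully instantiated;
   otherwise element copies are still chosen whenever the
   already-instantiated elements cover at least three quarters of its
   size (inst_size * 4 >= full_size * 3).  */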
1303 /* Entry point to phase 3. Instantiate scalar replacement variables. */
1306 decide_instantiations (void)
1310 struct bitmap_head_def done_head;
1312 /* We cannot clear bits from a bitmap we're iterating over,
1313 so save up all the bits to clear until the end. */
1314 bitmap_initialize (&done_head, 1);
1315 cleared_any = false;
1317 EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i,
1319 tree var = referenced_var (i);
1320 struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
1323 decide_instantiation_1 (elt, 0, 0);
1324 if (!decide_block_copy (elt))
1329 bitmap_set_bit (&done_head, i);
1336 bitmap_operation (sra_candidates, sra_candidates, &done_head,
1338 bitmap_operation (needs_copy_in, needs_copy_in, &done_head,
1341 bitmap_clear (&done_head);
1344 fputc ('\n', dump_file);
1348 /* Phase Four: Update the function to match the replacements created. */
1350 /* Mark all the variables in V_MAY_DEF or V_MUST_DEF operands for STMT for
1351 renaming. This becomes necessary when we modify all of a non-scalar. */
1354 mark_all_v_defs (tree stmt)
1356 v_may_def_optype v_may_defs;
1357 v_must_def_optype v_must_defs;
1360 get_stmt_operands (stmt);
1362 v_may_defs = V_MAY_DEF_OPS (stmt_ann (stmt));
1363 n = NUM_V_MAY_DEFS (v_may_defs);
1364 for (i = 0; i < n; i++)
1366 tree sym = V_MAY_DEF_RESULT (v_may_defs, i);
1367 if (TREE_CODE (sym) == SSA_NAME)
1368 sym = SSA_NAME_VAR (sym);
1369 bitmap_set_bit (vars_to_rename, var_ann (sym)->uid);
1372 v_must_defs = V_MUST_DEF_OPS (stmt_ann (stmt));
1373 n = NUM_V_MUST_DEFS (v_must_defs);
1374 for (i = 0; i < n; i++)
1376 tree sym = V_MUST_DEF_OP (v_must_defs, i);
1377 if (TREE_CODE (sym) == SSA_NAME)
1378 sym = SSA_NAME_VAR (sym);
1379 bitmap_set_bit (vars_to_rename, var_ann (sym)->uid);
1383 /* Build a single level component reference to ELT rooted at BASE. */
1386 generate_one_element_ref (struct sra_elt *elt, tree base)
1388 switch (TREE_CODE (TREE_TYPE (base)))
1391 return build (COMPONENT_REF, elt->type, base, elt->element, NULL);
1394 return build (ARRAY_REF, elt->type, base, elt->element, NULL, NULL);
1397 if (elt->element == integer_zero_node)
1398 return build (REALPART_EXPR, elt->type, base);
1400 return build (IMAGPART_EXPR, elt->type, base);
1407 /* Build a full component reference to ELT rooted at its native variable. */
1410 generate_element_ref (struct sra_elt *elt)
1413 return generate_one_element_ref (elt, generate_element_ref (elt->parent));
1415 return elt->element;
1418 /* Generate a set of assignment statements in *LIST_P to copy all
1419 instantiated elements under ELT to or from the equivalent structure
1420 rooted at EXPR. COPY_OUT controls the direction of the copy, with
1421 true meaning to copy out of EXPR into ELT. */
1424 generate_copy_inout (struct sra_elt *elt, bool copy_out, tree expr,
1430 if (elt->replacement)
1433 t = build (MODIFY_EXPR, void_type_node, elt->replacement, expr);
1435 t = build (MODIFY_EXPR, void_type_node, expr, elt->replacement);
1436 append_to_statement_list (t, list_p);
1440 for (c = elt->children; c ; c = c->sibling)
1442 t = generate_one_element_ref (c, unshare_expr (expr));
1443 generate_copy_inout (c, copy_out, t, list_p);
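/* Hypothetical example (not from the original source): for a
   scalarized PARM_DECL  struct pair p  with replacements p$x and p$y,
   the copy-in direction produces

     p$x = p.x;
     p$y = p.y;

   and the copy-out direction produces the mirror-image assignments,
   used when the aggregate must be reassembled before a use of P as a
   whole.  */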
1448 /* Generate a set of assignment statements in *LIST_P to copy all instantiated
1449 elements under SRC to their counterparts under DST. There must be a 1-1
1450 correspondence of instantiated elements. */
1453 generate_element_copy (struct sra_elt *dst, struct sra_elt *src, tree *list_p)
1455 struct sra_elt *dc, *sc;
1457 for (dc = dst->children; dc ; dc = dc->sibling)
1459 sc = lookup_element (src, dc->element, NULL, NO_INSERT);
1462 generate_element_copy (dc, sc, list_p);
1465 if (dst->replacement)
1469 if (src->replacement == NULL)
1472 t = build (MODIFY_EXPR, void_type_node, dst->replacement,
1474 append_to_statement_list (t, list_p);
1478 /* Generate a set of assignment statements in *LIST_P to zero all instantiated
1479 elements under ELT. In addition, do not assign to elements that have been
1480 marked VISITED but do reset the visited flag; this allows easy coordination
1481 with generate_element_init. */
1484 generate_element_zero (struct sra_elt *elt, tree *list_p)
1488 for (c = elt->children; c ; c = c->sibling)
1489 generate_element_zero (c, list_p);
1492 elt->visited = false;
1493 else if (elt->replacement)
1498 t = fold_convert (elt->type, integer_zero_node);
1500 /* We generated a replacement for a non-scalar? */
1503 t = build (MODIFY_EXPR, void_type_node, elt->replacement, t);
1504 append_to_statement_list (t, list_p);
1508 /* Generate a set of assignment statements in *LIST_P to set all instantiated
1509 elements under ELT with the contents of the initializer INIT. In addition,
1510 mark all assigned elements VISITED; this allows easy coordination with
1511 generate_element_zero. */
1514 generate_element_init (struct sra_elt *elt, tree init, tree *list_p)
1516 enum tree_code init_code = TREE_CODE (init);
1517 struct sra_elt *sub;
1522 if (elt->replacement)
1524 t = build (MODIFY_EXPR, void_type_node, elt->replacement, init);
1525 append_to_statement_list (t, list_p);
1526 elt->visited = true;
1535 for (sub = elt->children; sub ; sub = sub->sibling)
1537 if (sub->element == integer_zero_node)
1538 t = (init_code == COMPLEX_EXPR
1539 ? TREE_OPERAND (init, 0) : TREE_REALPART (init));
1541 t = (init_code == COMPLEX_EXPR
1542 ? TREE_OPERAND (init, 1) : TREE_IMAGPART (init));
1543 generate_element_init (sub, t, list_p);
1548 for (t = CONSTRUCTOR_ELTS (init); t ; t = TREE_CHAIN (t))
1550 sub = lookup_element (elt, TREE_PURPOSE (t), NULL, NO_INSERT);
1553 generate_element_init (sub, TREE_VALUE (t), list_p);
1562 /* Insert STMT on all the outgoing edges out of BB. Note that if BB
1563 has more than one edge, STMT will be replicated for each edge. Also,
1564 abnormal edges will be ignored. */
1567 insert_edge_copies (tree stmt, basic_block bb)
1573 for (e = bb->succ; e; e = e->succ_next)
1575 /* We don't need to insert copies on abnormal edges. The
1576 value of the scalar replacement is not guaranteed to
1577 be valid through an abnormal edge. */
1578 if (!(e->flags & EDGE_ABNORMAL))
1582 bsi_insert_on_edge (e, stmt);
1586 bsi_insert_on_edge (e, lhd_unsave_expr_now (stmt));
1591 /* Helper function to insert LIST before BSI, and set up line number info. */
1594 sra_insert_before (block_stmt_iterator *bsi, tree list)
1596 tree stmt = bsi_stmt (*bsi);
1598 if (EXPR_HAS_LOCATION (stmt))
1599 annotate_all_with_locus (&list, EXPR_LOCATION (stmt));
1600 bsi_insert_before (bsi, list, BSI_SAME_STMT);
1603 /* Similarly, but insert after BSI. Handles insertion onto edges as well. */
1606 sra_insert_after (block_stmt_iterator *bsi, tree list)
1608 tree stmt = bsi_stmt (*bsi);
1610 if (EXPR_HAS_LOCATION (stmt))
1611 annotate_all_with_locus (&list, EXPR_LOCATION (stmt));
1613 if (stmt_ends_bb_p (stmt))
1614 insert_edge_copies (list, bsi->bb);
1616 bsi_insert_after (bsi, list, BSI_CONTINUE_LINKING);
1619 /* Similarly, but replace the statement at BSI. */
1622 sra_replace (block_stmt_iterator *bsi, tree list)
1624 sra_insert_before (bsi, list);
1626 if (bsi_end_p (*bsi))
1627 *bsi = bsi_last (bsi->bb);
1632 /* Scalarize a USE. To recap, this is either a simple reference to ELT,
1633 if ELT is scalar, or some occurrence of ELT that requires a complete
1634 aggregate. IS_OUTPUT is true if ELT is being modified. */
1637 scalarize_use (struct sra_elt *elt, tree *expr_p, block_stmt_iterator *bsi,
1640 tree list = NULL, stmt = bsi_stmt (*bsi);
1642 if (elt->replacement)
1644 /* If we have a replacement, then updating the reference is as
1645 simple as modifying the existing statement in place. */
1647 mark_all_v_defs (stmt);
1648 *expr_p = elt->replacement;
1653 /* Otherwise we need some copies. If ELT is being read, then we want
1654 to store all (modified) sub-elements back into the structure before
1655 the reference takes place. If ELT is being written, then we want to
1656 load the changed values back into our shadow variables. */
1657 /* ??? We don't check modified for reads, we just always write all of
1658 the values. We should be able to record the SSA number of the VOP
1659 for which the values were last read. If that number matches the
1660 SSA number of the VOP in the current statement, then we needn't
1661 emit an assignment. This would also eliminate double writes when
1662 a structure is passed as more than one argument to a function call.
1663 This optimization would be most effective if sra_walk_function
1664 processed the blocks in dominator order. */
1666 generate_copy_inout (elt, is_output, generate_element_ref (elt), &list);
1671 mark_all_v_defs (expr_first (list));
1672 sra_insert_after (bsi, list);
1675 sra_insert_before (bsi, list);
1679 /* Scalarize a COPY. To recap, this is an assignment statement between
1680 two scalarizable references, LHS_ELT and RHS_ELT. */
1683 scalarize_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
1684 block_stmt_iterator *bsi)
1688 if (lhs_elt->replacement && rhs_elt->replacement)
1690 /* If we have two scalar operands, modify the existing statement. */
1691 stmt = bsi_stmt (*bsi);
1693 #ifdef ENABLE_CHECKING
1694 /* See the commentary in sra_walk_function concerning
1695 RETURN_EXPR, and why we should never see one here. */
1696 if (TREE_CODE (stmt) != MODIFY_EXPR)
1700 TREE_OPERAND (stmt, 0) = lhs_elt->replacement;
1701 TREE_OPERAND (stmt, 1) = rhs_elt->replacement;
1704 else if (lhs_elt->use_block_copy || rhs_elt->use_block_copy)
1706 /* If either side requires a block copy, then sync the RHS back
1707 to the original structure, leave the original assignment
1708 statement (which will perform the block copy), then load the
1709 LHS values out of its now-updated original structure. */
1710 /* ??? Could perform a modified pair-wise element copy. That
1711 would at least allow those elements that are instantiated in
1712 both structures to be optimized well. */
1715 generate_copy_inout (rhs_elt, false,
1716 generate_element_ref (rhs_elt), &list);
1719 mark_all_v_defs (expr_first (list));
1720 sra_insert_before (bsi, list);
1724 generate_copy_inout (lhs_elt, true,
1725 generate_element_ref (lhs_elt), &list);
1727 sra_insert_after (bsi, list);
1731 /* Otherwise both sides must be fully instantiated. In which
1732 case perform pair-wise element assignments and replace the
1733 original block copy statement. */
1735 stmt = bsi_stmt (*bsi);
1736 mark_all_v_defs (stmt);
1739 generate_element_copy (lhs_elt, rhs_elt, &list);
1742 sra_replace (bsi, list);
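/* Hypothetical example (not from the original source): when both A and
   B are fully instantiated, the aggregate assignment  a = b  is
   replaced by the element assignments

     a$x = b$x;
     a$y = b$y;

   whereas if either side requires block copy, the original assignment
   is kept and bracketed by copy-out statements for B before it and
   copy-in statements for A after it.  */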
1746 /* Scalarize an INIT. To recap, this is an assignment to a scalarizable
1747 reference from some form of constructor: CONSTRUCTOR, COMPLEX_CST or
1748 COMPLEX_EXPR. If RHS is NULL, it should be treated as an empty CONSTRUCTOR. */
1752 scalarize_init (struct sra_elt *lhs_elt, tree rhs, block_stmt_iterator *bsi)
1756 /* Generate initialization statements for all members extant in the RHS. */
1758 generate_element_init (lhs_elt, rhs, &list);
1760 /* CONSTRUCTOR is defined such that any member not mentioned is assigned
1761 a zero value. Initialize the rest of the instantiated elements. */
1762 generate_element_zero (lhs_elt, &list);
1766 if (lhs_elt->use_block_copy)
1768 /* Since LHS is not fully instantiated, we must leave the structure
1769 assignment in place. Treating this case differently from a USE
1770 exposes constants to later optimizations. */
1771 mark_all_v_defs (expr_first (list));
1772 sra_insert_after (bsi, list);
1776 /* The LHS is fully instantiated. The list of initializations
1777 replaces the original structure assignment. */
1778 mark_all_v_defs (bsi_stmt (*bsi));
1779 sra_replace (bsi, list);
1783 /* A subroutine of scalarize_ldst called via walk_tree. Set TREE_THIS_NOTRAP
1784 on all INDIRECT_REFs. */
1787 mark_notrap (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1791 if (TREE_CODE (t) == INDIRECT_REF)
1793 TREE_THIS_NOTRAP (t) = 1;
1796 else if (DECL_P (t) || TYPE_P (t))
1802 /* Scalarize a LDST. To recap, this is an assignment between one scalarizable
1803 reference ELT and one non-scalarizable reference OTHER. IS_OUTPUT is true
1804 if ELT is on the left-hand side. */
1807 scalarize_ldst (struct sra_elt *elt, tree other,
1808 block_stmt_iterator *bsi, bool is_output)
1810 /* Shouldn't have gotten called for a scalar. */
1811 if (elt->replacement)
1814 if (elt->use_block_copy)
1816 /* Since ELT is not fully instantiated, we have to leave the
1817 block copy in place. Treat this as a USE. */
1818 scalarize_use (elt, NULL, bsi, is_output);
1822 /* The interesting case is when ELT is fully instantiated. In this
1823 case we can have each element stored/loaded directly to/from the
1824 corresponding slot in OTHER. This avoids a block copy. */
1826 tree list = NULL, stmt = bsi_stmt (*bsi);
1828 mark_all_v_defs (stmt);
1829 generate_copy_inout (elt, is_output, other, &list);
1833 /* Preserve EH semantics. */
1834 if (stmt_ends_bb_p (stmt))
1836 tree_stmt_iterator tsi;
1839 /* Extract the first statement from LIST. */
1840 tsi = tsi_start (list);
1841 first = tsi_stmt (tsi);
1844 /* Replace the old statement with this new representative. */
1845 bsi_replace (bsi, first, true);
1847 if (!tsi_end_p (tsi))
1849 /* If any reference would trap, then they all would. And more
1850 to the point, the first would. Therefore none of the rest
1851 will trap since the first didn't. Indicate this by
1852 iterating over the remaining statements and set
1853 TREE_THIS_NOTRAP in all INDIRECT_REFs. */
1856 walk_tree (tsi_stmt_ptr (tsi), mark_notrap, NULL, NULL);
1859 while (!tsi_end_p (tsi));
1861 insert_edge_copies (list, bsi->bb);
1865 sra_replace (bsi, list);
1869 /* Generate initializations for all scalarizable parameters. */
1872 scalarize_parms (void)
1877 EXECUTE_IF_SET_IN_BITMAP (needs_copy_in, 0, i,
1879 tree var = referenced_var (i);
1880 struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
1881 generate_copy_inout (elt, true, var, &list);
1885 insert_edge_copies (list, ENTRY_BLOCK_PTR);
1888 /* Entry point to phase 4. Update the function to match replacements. */
1891 scalarize_function (void)
1893 static const struct sra_walk_fns fns = {
1894 scalarize_use, scalarize_copy, scalarize_init, scalarize_ldst, false
1897 sra_walk_function (&fns);
1899 bsi_commit_edge_inserts (NULL);
1903 /* Debug helper function. Print ELT in a nice human-readable format. */
1906 dump_sra_elt_name (FILE *f, struct sra_elt *elt)
1908 if (elt->parent && TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
1910 fputs (elt->element == integer_zero_node ? "__real__ " : "__imag__ ", f);
1911 dump_sra_elt_name (f, elt->parent);
1916 dump_sra_elt_name (f, elt->parent);
1917 if (DECL_P (elt->element))
1919 if (TREE_CODE (elt->element) == FIELD_DECL)
1921 print_generic_expr (f, elt->element, dump_flags);
1924 fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC "]",
1925 TREE_INT_CST_LOW (elt->element));
1929 /* Likewise, but callable from the debugger. */
1932 debug_sra_elt_name (struct sra_elt *elt)
1934 dump_sra_elt_name (stderr, elt);
1935 fputc ('\n', stderr);
1938 /* Main entry point. */
1943 /* Initialize local variables. */
1944 gcc_obstack_init (&sra_obstack);
1945 sra_candidates = BITMAP_XMALLOC ();
1946 needs_copy_in = BITMAP_XMALLOC ();
1947 sra_type_decomp_cache = BITMAP_XMALLOC ();
1948 sra_type_inst_cache = BITMAP_XMALLOC ();
1949 sra_map = htab_create (101, sra_elt_hash, sra_elt_eq, NULL);
1951 /* Scan. If we find anything, instantiate and scalarize. */
1952 if (find_candidates_for_sra ())
1955 decide_instantiations ();
1956 scalarize_function ();
1959 /* Free allocated memory. */
1960 htab_delete (sra_map);
1962 BITMAP_XFREE (sra_candidates);
1963 BITMAP_XFREE (needs_copy_in);
1964 BITMAP_XFREE (sra_type_decomp_cache);
1965 BITMAP_XFREE (sra_type_inst_cache);
1966 obstack_free (&sra_obstack, NULL);
1972 return flag_tree_sra != 0;
1975 struct tree_opt_pass pass_sra =
1978 gate_sra, /* gate */
1979 tree_sra, /* execute */
1982 0, /* static_pass_number */
1983 TV_TREE_SRA, /* tv_id */
1984 PROP_cfg | PROP_ssa, /* properties_required */
1985 0, /* properties_provided */
1986 0, /* properties_destroyed */
1987 0, /* todo_flags_start */
1988 TODO_dump_func | TODO_rename_vars
1989 | TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */