/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "tree.h"

/* These RTL headers are needed for basic-block.h.  */
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "langhooks.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "tree-gimple.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "flags.h"
#include "bitmap.h"
#include "obstack.h"
#include "target.h"
/* expr.h is needed for MOVE_RATIO.  */
#include "expr.h"
#include "params.h"

/* The object of this pass is to replace a non-addressable aggregate with a
   set of independent variables.  Most of the time, all of these variables
   will be scalars.  But a secondary objective is to break up larger
   aggregates into smaller aggregates.  In the process we may find that some
   bits of the larger aggregate can be deleted as unreferenced.

   This substitution is done globally.  More localized substitutions would
   be the purview of a load-store motion pass.

   The optimization proceeds in phases:

     (1) Identify variables that have types that are candidates for
	 decomposition.

     (2) Scan the function looking for the ways these variables are used.
	 In particular we're interested in the number of times a variable
	 (or member) is needed as a complete unit, and the number of times
	 a variable (or member) is copied.

     (3) Based on the usage profile, instantiate substitution variables.

     (4) Scan the function making replacements.  */

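/* For illustration (example assumed, not taken from any testcase): given

     struct point { int x; int y; } p;
     p.x = 3;
     p.y = 4;
     return p.x + p.y;

   phase (3) creates two scalar replacements and phase (4) rewrites the
   body roughly as

     p$x = 3;
     p$y = 4;
     return p$x + p$y;

   after which the aggregate P is dead.  The '$'-separated naming matches
   build_element_name below.  */
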
/* True if this is the "early" pass, before inlining.  */
static bool early_sra;

/* The set of todo flags to return from tree_sra.  */
static unsigned int todoflags;

/* The set of aggregate variables that are candidates for scalarization.  */
static bitmap sra_candidates;

/* Set of scalarizable PARM_DECLs that need copy-in operations at the
   beginning of the function.  */
static bitmap needs_copy_in;

/* Sets of bit pairs that cache type decomposition and instantiation.  */
static bitmap sra_type_decomp_cache;
static bitmap sra_type_inst_cache;

/* One of these structures is created for each candidate aggregate and
   each (accessed) member or group of members of such an aggregate.  */
struct sra_elt
{
  /* A tree of the elements.  Used when we want to traverse everything.  */
  struct sra_elt *parent;
  struct sra_elt *groups;
  struct sra_elt *children;
  struct sra_elt *sibling;

  /* If this element is a root, then this is the VAR_DECL.  If this is
     a sub-element, this is some token used to identify the reference.
     In the case of COMPONENT_REF, this is the FIELD_DECL.  In the case
     of an ARRAY_REF, this is the (constant) index.  In the case of an
     ARRAY_RANGE_REF, this is the (constant) RANGE_EXPR.  In the case
     of a complex number, this is a zero or one.  */
  tree element;

  /* The type of the element.  */
  tree type;

  /* A VAR_DECL, for any sub-element we've decided to replace.  */
  tree replacement;

  /* The number of times the element is referenced as a whole.  I.e.
     given "a.b.c", this would be incremented for C, but not for A or B.  */
  unsigned int n_uses;

  /* The number of times the element is copied to or from another
     scalarizable element.  */
  unsigned int n_copies;

  /* True if TYPE is scalar.  */
  bool is_scalar;

  /* True if this element is a group of members of its parent.  */
  bool is_group;

  /* True if we saw something about this element that prevents scalarization,
     such as non-constant indexing.  */
  bool cannot_scalarize;

  /* True if we've decided that structure-to-structure assignment
     should happen via memcpy and not per-element.  */
  bool use_block_copy;

  /* True if everything under this element has been marked TREE_NO_WARNING.  */
  bool all_no_warning;

  /* A flag for use with/after random access traversals.  */
  bool visited;

  /* True if there is BIT_FIELD_REF on the lhs with a vector.  */
  bool is_vector_lhs;

  /* 1 if the element is a field that is part of a block, 2 if the field
     is the block itself, 0 if it's neither.  */
  char in_bitfld_block;
};

#define IS_ELEMENT_FOR_GROUP(ELEMENT) (TREE_CODE (ELEMENT) == RANGE_EXPR)

#define FOR_EACH_ACTUAL_CHILD(CHILD, ELT)			\
  for ((CHILD) = (ELT)->is_group				\
		 ? next_child_for_group (NULL, (ELT))		\
		 : (ELT)->children;				\
       (CHILD);							\
       (CHILD) = (ELT)->is_group				\
		 ? next_child_for_group ((CHILD), (ELT))	\
		 : (CHILD)->sibling)

/* Helper function for above macro.  Return next child in group.  */
static struct sra_elt *
next_child_for_group (struct sra_elt *child, struct sra_elt *group)
{
  gcc_assert (group->is_group);

  /* Find the next child in the parent.  */
  if (child)
    child = child->sibling;
  else
    child = group->parent->children;

  /* Skip siblings that do not belong to the group.  */
  while (child)
    {
      tree g_elt = group->element;
      if (TREE_CODE (g_elt) == RANGE_EXPR)
	{
	  if (!tree_int_cst_lt (child->element, TREE_OPERAND (g_elt, 0))
	      && !tree_int_cst_lt (TREE_OPERAND (g_elt, 1), child->element))
	    break;
	}
      else
	gcc_unreachable ();

      child = child->sibling;
    }

  return child;
}

/* Random access to the child of a parent is performed by hashing.
   This prevents quadratic behavior, and allows SRA to function
   reasonably on larger records.  */
static htab_t sra_map;

/* All structures are allocated out of the following obstack.  */
static struct obstack sra_obstack;

/* Debugging functions.  */
static void dump_sra_elt_name (FILE *, struct sra_elt *);
extern void debug_sra_elt_name (struct sra_elt *);

/* Forward declarations.  */
static tree generate_element_ref (struct sra_elt *);
static tree sra_build_assignment (tree dst, tree src);
static void mark_all_v_defs (tree list);

/* Return true if DECL is an SRA candidate.  */

static bool
is_sra_candidate_decl (tree decl)
{
  return DECL_P (decl) && bitmap_bit_p (sra_candidates, DECL_UID (decl));
}

/* Return true if TYPE is a scalar type.  */

static bool
is_sra_scalar_type (tree type)
{
  enum tree_code code = TREE_CODE (type);
  return (code == INTEGER_TYPE || code == REAL_TYPE || code == VECTOR_TYPE
	  || code == FIXED_POINT_TYPE
	  || code == ENUMERAL_TYPE || code == BOOLEAN_TYPE
	  || code == POINTER_TYPE || code == OFFSET_TYPE
	  || code == REFERENCE_TYPE);
}

/* Return true if TYPE can be decomposed into a set of independent variables.

   Note that this doesn't imply that all elements of TYPE can be
   instantiated, just that if we decide to break up the type into
   separate pieces that it can be done.  */

static bool
sra_type_can_be_decomposed_p (tree type)
{
  unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
  tree t;

  /* Avoid searching the same type twice.  */
  if (bitmap_bit_p (sra_type_decomp_cache, cache+0))
    return true;
  if (bitmap_bit_p (sra_type_decomp_cache, cache+1))
    return false;

  /* The type must have a definite nonzero size.  */
  if (TYPE_SIZE (type) == NULL || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
      || integer_zerop (TYPE_SIZE (type)))
    goto fail;

  /* The type must be a non-union aggregate.  */
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	bool saw_one_field = false;

	for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
	  if (TREE_CODE (t) == FIELD_DECL)
	    {
	      /* Reject incorrectly represented bit fields.  */
	      if (DECL_BIT_FIELD (t)
		  && (tree_low_cst (DECL_SIZE (t), 1)
		      != TYPE_PRECISION (TREE_TYPE (t))))
		goto fail;

	      saw_one_field = true;
	    }

	/* Record types must have at least one field.  */
	if (!saw_one_field)
	  goto fail;
      }
      break;

    case ARRAY_TYPE:
      /* Array types must have a fixed lower and upper bound.  */
      t = TYPE_DOMAIN (type);
      if (t == NULL)
	goto fail;
      if (TYPE_MIN_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MIN_VALUE (t)))
	goto fail;
      if (TYPE_MAX_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MAX_VALUE (t)))
	goto fail;
      break;

    case COMPLEX_TYPE:
      break;

    default:
      goto fail;
    }

  bitmap_set_bit (sra_type_decomp_cache, cache+0);
  return true;

 fail:
  bitmap_set_bit (sra_type_decomp_cache, cache+1);
  return false;
}

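/* Examples (illustrative): "struct s { int i; double d; }" and
   "int a[4]" can be decomposed; a union, a variable-sized type, or a
   record with a mis-represented bit field cannot.  Per the note above,
   a record containing a union member can still be decomposed -- the
   union member simply remains an aggregate leaf.  */
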
/* Return true if DECL can be decomposed into a set of independent
   (though not necessarily scalar) variables.  */

static bool
decl_can_be_decomposed_p (tree var)
{
  /* Early out for scalars.  */
  if (is_sra_scalar_type (TREE_TYPE (var)))
    return false;

  /* The variable must not be aliased.  */
  if (!is_gimple_non_addressable (var))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because it must live in memory\n");
	}
      return false;
    }

  /* The variable must not be volatile.  */
  if (TREE_THIS_VOLATILE (var))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because it is declared volatile\n");
	}
      return false;
    }

  /* We must be able to decompose the variable's type.  */
  if (!sra_type_can_be_decomposed_p (TREE_TYPE (var)))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because its type cannot be decomposed\n");
	}
      return false;
    }

  /* HACK: if we decompose a va_list_type_node before inlining, then we'll
     confuse tree-stdarg.c, and we won't be able to figure out which and
     how many arguments are accessed.  This really should be improved in
     tree-stdarg.c, as the decomposition is truly a win.  This could also
     be fixed if the stdarg pass ran early, but this can't be done until
     we have aliasing information early too.  See PR 30791.  */
  if (early_sra
      && TYPE_MAIN_VARIANT (TREE_TYPE (var))
	 == TYPE_MAIN_VARIANT (va_list_type_node))
    return false;

  return true;
}

/* Return true if TYPE can be *completely* decomposed into scalars.  */

static bool
type_can_instantiate_all_elements (tree type)
{
  if (is_sra_scalar_type (type))
    return true;
  if (!sra_type_can_be_decomposed_p (type))
    return false;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
	tree f;

	if (bitmap_bit_p (sra_type_inst_cache, cache+0))
	  return true;
	if (bitmap_bit_p (sra_type_inst_cache, cache+1))
	  return false;

	for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      if (!type_can_instantiate_all_elements (TREE_TYPE (f)))
		{
		  bitmap_set_bit (sra_type_inst_cache, cache+1);
		  return false;
		}
	    }

	bitmap_set_bit (sra_type_inst_cache, cache+0);
	return true;
      }

    case ARRAY_TYPE:
      return type_can_instantiate_all_elements (TREE_TYPE (type));

    case COMPLEX_TYPE:
      return true;

    default:
      gcc_unreachable ();
    }
}

/* Test whether ELT or some sub-element cannot be scalarized.  */

static bool
can_completely_scalarize_p (struct sra_elt *elt)
{
  struct sra_elt *c;

  if (elt->cannot_scalarize)
    return false;

  for (c = elt->children; c; c = c->sibling)
    if (!can_completely_scalarize_p (c))
      return false;

  for (c = elt->groups; c; c = c->sibling)
    if (!can_completely_scalarize_p (c))
      return false;

  return true;
}

/* A simplified tree hashing algorithm that only handles the types of
   trees we expect to find in sra_elt->element.  */

static hashval_t
sra_hash_tree (tree t)
{
  hashval_t h;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      h = DECL_UID (t);
      break;

    case INTEGER_CST:
      h = TREE_INT_CST_LOW (t) ^ TREE_INT_CST_HIGH (t);
      break;

    case RANGE_EXPR:
      h = iterative_hash_expr (TREE_OPERAND (t, 0), 0);
      h = iterative_hash_expr (TREE_OPERAND (t, 1), h);
      break;

    case FIELD_DECL:
      /* We can have types that are compatible, but have different member
	 lists, so we can't hash fields by ID.  Use offsets instead.  */
      h = iterative_hash_expr (DECL_FIELD_OFFSET (t), 0);
      h = iterative_hash_expr (DECL_FIELD_BIT_OFFSET (t), h);
      break;

    case BIT_FIELD_REF:
      /* Don't take operand 0 into account, that's our parent.  */
      h = iterative_hash_expr (TREE_OPERAND (t, 1), 0);
      h = iterative_hash_expr (TREE_OPERAND (t, 2), h);
      break;

    default:
      gcc_unreachable ();
    }

  return h;
}

/* Hash function for type SRA_PAIR.  */

static hashval_t
sra_elt_hash (const void *x)
{
  const struct sra_elt *e = x;
  const struct sra_elt *p;
  hashval_t h;

  h = sra_hash_tree (e->element);

  /* Take into account everything except bitfield blocks back up the
     chain.  Given that chain lengths are rarely very long, this
     should be acceptable.  If we truly identify this as a performance
     problem, it should work to hash the pointer value
     "e->parent".  */
  for (p = e->parent; p ; p = p->parent)
    if (!p->in_bitfld_block)
      h = (h * 65521) ^ sra_hash_tree (p->element);

  return h;
}

/* Equality function for type SRA_PAIR.  */

static int
sra_elt_eq (const void *x, const void *y)
{
  const struct sra_elt *a = x;
  const struct sra_elt *b = y;
  tree ae, be;
  const struct sra_elt *ap = a->parent;
  const struct sra_elt *bp = b->parent;

  if (ap)
    while (ap->in_bitfld_block)
      ap = ap->parent;
  if (bp)
    while (bp->in_bitfld_block)
      bp = bp->parent;

  if (ap != bp)
    return false;

  ae = a->element;
  be = b->element;

  if (ae == be)
    return true;
  if (TREE_CODE (ae) != TREE_CODE (be))
    return false;

  switch (TREE_CODE (ae))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* These are all pointer unique.  */
      return false;

    case INTEGER_CST:
      /* Integers are not pointer unique, so compare their values.  */
      return tree_int_cst_equal (ae, be);

    case RANGE_EXPR:
      return
	tree_int_cst_equal (TREE_OPERAND (ae, 0), TREE_OPERAND (be, 0))
	&& tree_int_cst_equal (TREE_OPERAND (ae, 1), TREE_OPERAND (be, 1));

    case FIELD_DECL:
      /* Fields are unique within a record, but not between
	 compatible records.  */
      if (DECL_FIELD_CONTEXT (ae) == DECL_FIELD_CONTEXT (be))
	return false;
      return fields_compatible_p (ae, be);

    case BIT_FIELD_REF:
      return
	tree_int_cst_equal (TREE_OPERAND (ae, 1), TREE_OPERAND (be, 1))
	&& tree_int_cst_equal (TREE_OPERAND (ae, 2), TREE_OPERAND (be, 2));

    default:
      gcc_unreachable ();
    }
}

/* Create or return the SRA_ELT structure for CHILD in PARENT.  PARENT
   may be null, in which case CHILD must be a DECL.  */

static struct sra_elt *
lookup_element (struct sra_elt *parent, tree child, tree type,
		enum insert_option insert)
{
  struct sra_elt dummy;
  struct sra_elt **slot;
  struct sra_elt *elt;

  if (parent)
    dummy.parent = parent->is_group ? parent->parent : parent;
  else
    dummy.parent = NULL;
  dummy.element = child;

  slot = (struct sra_elt **) htab_find_slot (sra_map, &dummy, insert);
  if (!slot && insert == NO_INSERT)
    return NULL;

  elt = *slot;
  if (!elt && insert == INSERT)
    {
      *slot = elt = obstack_alloc (&sra_obstack, sizeof (*elt));
      memset (elt, 0, sizeof (*elt));

      elt->parent = parent;
      elt->element = child;
      elt->type = type;
      elt->is_scalar = is_sra_scalar_type (type);

      if (parent)
	{
	  if (IS_ELEMENT_FOR_GROUP (elt->element))
	    {
	      elt->is_group = true;
	      elt->sibling = parent->groups;
	      parent->groups = elt;
	    }
	  else
	    {
	      elt->sibling = parent->children;
	      parent->children = elt;
	    }
	}

      /* If this is a parameter, then if we want to scalarize, we have
	 one copy from the true function parameter.  Count it now.  */
      if (TREE_CODE (child) == PARM_DECL)
	{
	  elt->n_copies = 1;
	  bitmap_set_bit (needs_copy_in, DECL_UID (child));
	}
    }

  return elt;
}

/* Create or return the SRA_ELT structure for EXPR if the expression
   refers to a scalarizable variable.  */

static struct sra_elt *
maybe_lookup_element_for_expr (tree expr)
{
  struct sra_elt *elt;
  tree child;

  switch (TREE_CODE (expr))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      if (is_sra_candidate_decl (expr))
	return lookup_element (NULL, expr, TREE_TYPE (expr), INSERT);
      return NULL;

    case ARRAY_REF:
      /* We can't scalarize variable array indices.  */
      if (in_array_bounds_p (expr))
	child = TREE_OPERAND (expr, 1);
      else
	return NULL;
      break;

    case ARRAY_RANGE_REF:
      /* We can't scalarize variable array indices.  */
      if (range_in_array_bounds_p (expr))
	{
	  tree domain = TYPE_DOMAIN (TREE_TYPE (expr));
	  child = build2 (RANGE_EXPR, integer_type_node,
			  TYPE_MIN_VALUE (domain), TYPE_MAX_VALUE (domain));
	}
      else
	return NULL;
      break;

    case COMPONENT_REF:
      {
	tree type = TREE_TYPE (TREE_OPERAND (expr, 0));
	/* Don't look through unions.  */
	if (TREE_CODE (type) != RECORD_TYPE)
	  return NULL;
	/* Neither through variable-sized records.  */
	if (TYPE_SIZE (type) == NULL_TREE
	    || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	  return NULL;
	child = TREE_OPERAND (expr, 1);
      }
      break;

    case REALPART_EXPR:
      child = integer_zero_node;
      break;
    case IMAGPART_EXPR:
      child = integer_one_node;
      break;

    default:
      return NULL;
    }

  elt = maybe_lookup_element_for_expr (TREE_OPERAND (expr, 0));
  if (elt)
    return lookup_element (elt, child, TREE_TYPE (expr), INSERT);
  else
    return NULL;
}

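/* For example (illustrative): for the reference "a.b[2].c" the
   recursion above walks the COMPONENT_REF and ARRAY_REF nodes and
   creates (or finds) the elements for A, then A.B, then A.B[2], and
   finally A.B[2].C, the last of which is returned.  */
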
/* Functions to walk just enough of the tree to see all scalarizable
   references, and categorize them.  */

/* A set of callbacks for phases 2 and 4.  They'll be invoked for the
   various kinds of references seen.  In all cases, *BSI is an iterator
   pointing to the statement being processed.  */
struct sra_walk_fns
{
  /* Invoked when ELT is required as a unit.  Note that ELT might refer to
     a leaf node, in which case this is a simple scalar reference.  *EXPR_P
     points to the location of the expression.  IS_OUTPUT is true if this
     is a left-hand-side reference.  USE_ALL is true if we saw something we
     couldn't quite identify and had to force the use of the entire object.  */
  void (*use) (struct sra_elt *elt, tree *expr_p,
	       block_stmt_iterator *bsi, bool is_output, bool use_all);

  /* Invoked when we have a copy between two scalarizable references.  */
  void (*copy) (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
		block_stmt_iterator *bsi);

  /* Invoked when ELT is initialized from a constant.  VALUE may be NULL,
     in which case it should be treated as an empty CONSTRUCTOR.  */
  void (*init) (struct sra_elt *elt, tree value, block_stmt_iterator *bsi);

  /* Invoked when we have a copy between one scalarizable reference ELT
     and one non-scalarizable reference OTHER without side-effects.
     IS_OUTPUT is true if ELT is on the left-hand side.  */
  void (*ldst) (struct sra_elt *elt, tree other,
		block_stmt_iterator *bsi, bool is_output);

  /* True during phase 2, false during phase 4.  */
  /* ??? This is a hack.  */
  bool initial_scan;
};

#ifdef ENABLE_CHECKING
/* Invoked via walk_tree, if *TP contains a candidate decl, return it.  */

static tree
sra_find_candidate_decl (tree *tp, int *walk_subtrees,
			 void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
    {
      *walk_subtrees = 0;
      if (is_sra_candidate_decl (t))
	return t;
    }
  else if (TYPE_P (t))
    *walk_subtrees = 0;

  return NULL_TREE;
}
#endif

/* Walk most expressions looking for a scalarizable aggregate.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_expr (tree *expr_p, block_stmt_iterator *bsi, bool is_output,
	       const struct sra_walk_fns *fns)
{
  tree expr = *expr_p;
  tree inner = expr;
  bool disable_scalarization = false;
  bool use_all_p = false;

  /* We're looking to collect a reference expression between EXPR and INNER,
     such that INNER is a scalarizable decl and all other nodes through EXPR
     are references that we can scalarize.  If we come across something that
     we can't scalarize, we reset EXPR.  This has the effect of making it
     appear that we're referring to the larger expression as a whole.  */
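  /* For example (illustrative): in "a.b[i].c" with a non-constant
     index I, the ARRAY_REF case below sets DISABLE_SCALARIZATION, so
     when we reach the decl A at the bottom we mark the whole variable
     as not scalarizable instead of pretending "a.b" was used.  */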
  while (1)
    switch (TREE_CODE (inner))
      {
      case VAR_DECL:
      case PARM_DECL:
      case RESULT_DECL:
	/* If there is a scalarizable decl at the bottom, then process it.  */
	if (is_sra_candidate_decl (inner))
	  {
	    struct sra_elt *elt = maybe_lookup_element_for_expr (expr);
	    if (disable_scalarization)
	      elt->cannot_scalarize = true;
	    else
	      fns->use (elt, expr_p, bsi, is_output, use_all_p);
	  }
	return;

      case ARRAY_REF:
	/* Non-constant index means any member may be accessed.  Prevent the
	   expression from being scalarized.  If we were to treat this as a
	   reference to the whole array, we can wind up with a single dynamic
	   index reference inside a loop being overridden by several constant
	   index references during loop setup.  It's possible that this could
	   be avoided by using dynamic usage counts based on BB trip counts
	   (based on loop analysis or profiling), but that hardly seems worth
	   the effort.  */
	/* ??? Hack.  Figure out how to push this into the scan routines
	   without duplicating too much code.  */
	if (!in_array_bounds_p (inner))
	  {
	    disable_scalarization = true;
	    goto use_all;
	  }
	/* ??? Are we assured that non-constant bounds and stride will have
	   the same value everywhere?  I don't think Fortran will...  */
	if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
	  goto use_all;
	inner = TREE_OPERAND (inner, 0);
	break;

      case ARRAY_RANGE_REF:
	if (!range_in_array_bounds_p (inner))
	  {
	    disable_scalarization = true;
	    goto use_all;
	  }
	/* ??? See above regarding non-constant bounds and stride.  */
	if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
	  goto use_all;
	inner = TREE_OPERAND (inner, 0);
	break;

      case COMPONENT_REF:
	{
	  tree type = TREE_TYPE (TREE_OPERAND (inner, 0));
	  /* Don't look through unions.  */
	  if (TREE_CODE (type) != RECORD_TYPE)
	    goto use_all;
	  /* Neither through variable-sized records.  */
	  if (TYPE_SIZE (type) == NULL_TREE
	      || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	    goto use_all;
	  inner = TREE_OPERAND (inner, 0);
	}
	break;

      case REALPART_EXPR:
      case IMAGPART_EXPR:
	inner = TREE_OPERAND (inner, 0);
	break;

      case BIT_FIELD_REF:
	/* A bit field reference into a vector is scalarized, but one
	   appearing on the left-hand side must be noted so that, when we
	   scalarize the vector, we can mark its replacement variable as
	   not renamable.  */
	if (is_output
	    && TREE_CODE (TREE_TYPE (TREE_OPERAND (inner, 0))) == VECTOR_TYPE)
	  {
	    struct sra_elt *elt
	      = maybe_lookup_element_for_expr (TREE_OPERAND (inner, 0));
	    if (elt)
	      elt->is_vector_lhs = true;
	  }
	/* A bit field reference (access to *multiple* fields simultaneously)
	   is not currently scalarized.  Consider this an access to the full
	   outer element, to which walk_tree will bring us next.  */
	goto use_all;

      case NOP_EXPR:
	/* Similarly, a nop explicitly wants to look at an object in a
	   type other than the one we've scalarized.  */
	goto use_all;

      case VIEW_CONVERT_EXPR:
	/* Likewise for a view conversion, but with an additional twist:
	   it can be on the LHS and, in this case, an access to the full
	   outer element would mean a killing def.  So we need to punt
	   if we haven't already a full access to the current element,
	   because we cannot pretend to have a killing def if we only
	   have a partial access at some level.  */
	if (is_output && !use_all_p && inner != expr)
	  disable_scalarization = true;
	goto use_all;

      case WITH_SIZE_EXPR:
	/* This is a transparent wrapper.  The entire inner expression really
	   is being used.  */
	goto use_all;

      use_all:
	expr_p = &TREE_OPERAND (inner, 0);
	inner = expr = *expr_p;
	use_all_p = true;
	break;

      default:
#ifdef ENABLE_CHECKING
	/* Validate that we're not missing any references.  */
	gcc_assert (!walk_tree (&inner, sra_find_candidate_decl, NULL, NULL));
#endif
	return;
      }
}

/* Walk a TREE_LIST of values looking for scalarizable aggregates.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_tree_list (tree list, block_stmt_iterator *bsi, bool is_output,
		    const struct sra_walk_fns *fns)
{
  tree op;
  for (op = list; op ; op = TREE_CHAIN (op))
    sra_walk_expr (&TREE_VALUE (op), bsi, is_output, fns);
}

/* Walk the arguments of a CALL_EXPR looking for scalarizable aggregates.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_call_expr (tree expr, block_stmt_iterator *bsi,
		    const struct sra_walk_fns *fns)
{
  int i;
  int nargs = call_expr_nargs (expr);
  for (i = 0; i < nargs; i++)
    sra_walk_expr (&CALL_EXPR_ARG (expr, i), bsi, false, fns);
}

/* Walk the inputs and outputs of an ASM_EXPR looking for scalarizable
   aggregates.  If we find one, invoke FNS->USE.  */

static void
sra_walk_asm_expr (tree expr, block_stmt_iterator *bsi,
		   const struct sra_walk_fns *fns)
{
  sra_walk_tree_list (ASM_INPUTS (expr), bsi, false, fns);
  sra_walk_tree_list (ASM_OUTPUTS (expr), bsi, true, fns);
}

/* Walk a GIMPLE_MODIFY_STMT and categorize the assignment appropriately.  */

static void
sra_walk_gimple_modify_stmt (tree expr, block_stmt_iterator *bsi,
			     const struct sra_walk_fns *fns)
{
  struct sra_elt *lhs_elt, *rhs_elt;
  tree lhs, rhs;

  lhs = GIMPLE_STMT_OPERAND (expr, 0);
  rhs = GIMPLE_STMT_OPERAND (expr, 1);
  lhs_elt = maybe_lookup_element_for_expr (lhs);
  rhs_elt = maybe_lookup_element_for_expr (rhs);

  /* If both sides are scalarizable, this is a COPY operation.  */
  if (lhs_elt && rhs_elt)
    {
      fns->copy (lhs_elt, rhs_elt, bsi);
      return;
    }

  /* If the RHS is scalarizable, handle it.  There are only two cases.  */
  if (rhs_elt)
    {
      if (!rhs_elt->is_scalar && !TREE_SIDE_EFFECTS (lhs))
	fns->ldst (rhs_elt, lhs, bsi, false);
      else
	fns->use (rhs_elt, &GIMPLE_STMT_OPERAND (expr, 1), bsi, false, false);
    }

  /* If it isn't scalarizable, there may be scalarizable variables within, so
     check for a call or else walk the RHS to see if we need to do any
     copy-in operations.  We need to do it before the LHS is scalarized so
     that the statements get inserted in the proper place, before any
     copy-out operations.  */
  else
    {
      tree call = get_call_expr_in (rhs);
      if (call)
	sra_walk_call_expr (call, bsi, fns);
      else
	sra_walk_expr (&GIMPLE_STMT_OPERAND (expr, 1), bsi, false, fns);
    }

  /* Likewise, handle the LHS being scalarizable.  We have cases similar
     to those above, but also want to handle RHS being constant.  */
  if (lhs_elt)
    {
      /* If this is an assignment from a constant, or constructor, then
	 we have access to all of the elements individually.  Invoke INIT.  */
      if (TREE_CODE (rhs) == COMPLEX_EXPR
	  || TREE_CODE (rhs) == COMPLEX_CST
	  || TREE_CODE (rhs) == CONSTRUCTOR)
	fns->init (lhs_elt, rhs, bsi);

      /* If this is an assignment from read-only memory, treat this as if
	 we'd been passed the constructor directly.  Invoke INIT.  */
      else if (TREE_CODE (rhs) == VAR_DECL
	       && TREE_STATIC (rhs)
	       && TREE_READONLY (rhs)
	       && targetm.binds_local_p (rhs))
	fns->init (lhs_elt, DECL_INITIAL (rhs), bsi);

      /* If this is a copy from a non-scalarizable lvalue, invoke LDST.
	 The lvalue requirement prevents us from trying to directly scalarize
	 the result of a function call, which would result in trying to call
	 the function multiple times, and other evil things.  */
      else if (!lhs_elt->is_scalar
	       && !TREE_SIDE_EFFECTS (rhs) && is_gimple_addressable (rhs))
	fns->ldst (lhs_elt, rhs, bsi, true);

      /* Otherwise we're being used in some context that requires the
	 aggregate to be seen as a whole.  Invoke USE.  */
      else
	fns->use (lhs_elt, &GIMPLE_STMT_OPERAND (expr, 0), bsi, true, false);
    }

  /* Similarly to above, LHS_ELT being null only means that the LHS as a
     whole is not a scalarizable reference.  There may be occurrences of
     scalarizable variables within, which implies a USE.  */
  else
    sra_walk_expr (&GIMPLE_STMT_OPERAND (expr, 0), bsi, true, fns);
}

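/* For illustration (assumed candidate aggregates A and B of one struct
   type, scalar S): "a = b" is a COPY; "a = {}" is an INIT; "a = *p"
   and "*p = a" are LDSTs; "s = a.x" walks the RHS and is a USE of
   element A$x; and "a" passed as a call argument is a USE of the whole
   of A.  */
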
/* Entry point to the walk functions.  Search the entire function,
   invoking the callbacks in FNS on each of the references to
   scalarizable variables.  */

static void
sra_walk_function (const struct sra_walk_fns *fns)
{
  basic_block bb;
  block_stmt_iterator si, ni;

  /* ??? Phase 4 could derive some benefit from walking the function in
     dominator tree order.  */

  FOR_EACH_BB (bb)
    for (si = bsi_start (bb); !bsi_end_p (si); si = ni)
      {
	tree stmt, t;
	stmt_ann_t ann;

	stmt = bsi_stmt (si);
	ann = stmt_ann (stmt);

	ni = si;
	bsi_next (&ni);

	/* If the statement has no virtual operands, then it doesn't
	   make any structure references that we care about.  */
	if (gimple_aliases_computed_p (cfun)
	    && ZERO_SSA_OPERANDS (stmt, (SSA_OP_VIRTUAL_DEFS | SSA_OP_VUSE)))
	  continue;

	switch (TREE_CODE (stmt))
	  {
	  case RETURN_EXPR:
	    /* If we have "return <retval>" then the return value is
	       already exposed for our pleasure.  Walk it as a USE to
	       force all the components back in place for the return.

	       If we have an embedded assignment, then <retval> is of
	       a type that gets returned in registers in this ABI, and
	       we do not wish to extend their lifetimes.  Treat this
	       as a USE of the variable on the RHS of this assignment.  */

	    t = TREE_OPERAND (stmt, 0);
	    if (t == NULL_TREE)
	      ;
	    else if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
	      sra_walk_expr (&GIMPLE_STMT_OPERAND (t, 1), &si, false, fns);
	    else
	      sra_walk_expr (&TREE_OPERAND (stmt, 0), &si, false, fns);
	    break;

	  case GIMPLE_MODIFY_STMT:
	    sra_walk_gimple_modify_stmt (stmt, &si, fns);
	    break;
	  case CALL_EXPR:
	    sra_walk_call_expr (stmt, &si, fns);
	    break;
	  case ASM_EXPR:
	    sra_walk_asm_expr (stmt, &si, fns);
	    break;

	  default:
	    break;
	  }
      }
}

/* Phase One: Scan all referenced variables in the program looking for
   structures that could be decomposed.  */

static bool
find_candidates_for_sra (void)
{
  bool any_set = false;
  tree var;
  referenced_var_iterator rvi;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (decl_can_be_decomposed_p (var))
	{
	  bitmap_set_bit (sra_candidates, DECL_UID (var));
	  any_set = true;
	}
    }

  return any_set;
}

/* Phase Two: Scan all references to scalarizable variables.  Count the
   number of times they are used or copied respectively.  */

/* Callbacks to fill in SRA_WALK_FNS.  Everything but USE is
   considered a copy, because we can decompose the reference such that
   the sub-elements needn't be contiguous.  */

static void
scan_use (struct sra_elt *elt, tree *expr_p ATTRIBUTE_UNUSED,
	  block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
	  bool is_output ATTRIBUTE_UNUSED, bool use_all ATTRIBUTE_UNUSED)
{
  elt->n_uses += 1;
}

static void
scan_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
	   block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
{
  lhs_elt->n_copies += 1;
  rhs_elt->n_copies += 1;
}

static void
scan_init (struct sra_elt *lhs_elt, tree rhs ATTRIBUTE_UNUSED,
	   block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
{
  lhs_elt->n_copies += 1;
}

static void
scan_ldst (struct sra_elt *elt, tree other ATTRIBUTE_UNUSED,
	   block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
	   bool is_output ATTRIBUTE_UNUSED)
{
  elt->n_copies += 1;
}

/* Dump the values we collected during the scanning phase.  */

static void
scan_dump (struct sra_elt *elt)
{
  struct sra_elt *c;

  dump_sra_elt_name (dump_file, elt);
  fprintf (dump_file, ": n_uses=%u n_copies=%u\n", elt->n_uses, elt->n_copies);

  for (c = elt->children; c ; c = c->sibling)
    scan_dump (c);

  for (c = elt->groups; c ; c = c->sibling)
    scan_dump (c);
}

/* Entry point to phase 2.  Scan the entire function, building up
   scalarization data structures, recording copies and uses.  */

static void
scan_function (void)
{
  static const struct sra_walk_fns fns = {
    scan_use, scan_copy, scan_init, scan_ldst, true
  };

  sra_walk_function (&fns);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      unsigned i;
      bitmap_iterator bi;

      fputs ("\nScan results:\n", dump_file);
      EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
	{
	  tree var = referenced_var (i);
	  struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
	  if (elt)
	    scan_dump (elt);
	}
      fputc ('\n', dump_file);
    }
}

/* Phase Three: Make decisions about which variables to scalarize, if any.
   All elements to be scalarized have replacement variables made for them.  */

/* A subroutine of build_element_name.  Recursively build the element
   name on the obstack.  */

static void
build_element_name_1 (struct sra_elt *elt)
{
  tree t;
  char buffer[32];

  if (elt->parent)
    {
      build_element_name_1 (elt->parent);
      obstack_1grow (&sra_obstack, '$');

      if (TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
	{
	  if (elt->element == integer_zero_node)
	    obstack_grow (&sra_obstack, "real", 4);
	  else
	    obstack_grow (&sra_obstack, "imag", 4);
	  return;
	}
    }

  t = elt->element;
  if (TREE_CODE (t) == INTEGER_CST)
    {
      /* ??? Eh.  Don't bother doing double-wide printing.  */
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (t));
      obstack_grow (&sra_obstack, buffer, strlen (buffer));
    }
  else if (TREE_CODE (t) == BIT_FIELD_REF)
    {
      sprintf (buffer, "B" HOST_WIDE_INT_PRINT_DEC,
	       tree_low_cst (TREE_OPERAND (t, 2), 1));
      obstack_grow (&sra_obstack, buffer, strlen (buffer));
      sprintf (buffer, "F" HOST_WIDE_INT_PRINT_DEC,
	       tree_low_cst (TREE_OPERAND (t, 1), 1));
      obstack_grow (&sra_obstack, buffer, strlen (buffer));
    }
  else
    {
      tree name = DECL_NAME (t);
      if (name)
	obstack_grow (&sra_obstack, IDENTIFIER_POINTER (name),
		      IDENTIFIER_LENGTH (name));
      else
	{
	  sprintf (buffer, "D%u", DECL_UID (t));
	  obstack_grow (&sra_obstack, buffer, strlen (buffer));
	}
    }
}

/* Construct a pretty variable name for an element's replacement variable.
   The name is built on the obstack.  */

static char *
build_element_name (struct sra_elt *elt)
{
  build_element_name_1 (elt);
  obstack_1grow (&sra_obstack, '\0');
  return XOBFINISH (&sra_obstack, char *);
}

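/* For example (names assumed): the replacement for field "c" of
   element 2 of array field "b" of variable "a" is named "a$b$2$c";
   the real part of a complex field "z" yields "a$z$real"; and a
   coalesced bit-field word gets a "B<bit>F<width>" component
   instead.  */
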
/* Instantiate an element as an independent variable.  */

static void
instantiate_element (struct sra_elt *elt)
{
  struct sra_elt *base_elt;
  tree var, base;
  bool nowarn = TREE_NO_WARNING (elt->element);

  for (base_elt = elt; base_elt->parent; base_elt = base_elt->parent)
    if (!nowarn)
      nowarn = TREE_NO_WARNING (base_elt->parent->element);
  base = base_elt->element;

  elt->replacement = var = make_rename_temp (elt->type, "SR");

  if (DECL_P (elt->element)
      && !tree_int_cst_equal (DECL_SIZE (var), DECL_SIZE (elt->element)))
    {
      DECL_SIZE (var) = DECL_SIZE (elt->element);
      DECL_SIZE_UNIT (var) = DECL_SIZE_UNIT (elt->element);

      elt->in_bitfld_block = 1;
      elt->replacement = build3 (BIT_FIELD_REF, elt->type, var,
				 DECL_SIZE (var),
				 BYTES_BIG_ENDIAN
				 ? size_binop (MINUS_EXPR,
					       TYPE_SIZE (elt->type),
					       DECL_SIZE (var))
				 : bitsize_int (0));
    }

  /* For vectors, if used on the left hand side with BIT_FIELD_REF,
     they are not a gimple register.  */
  if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE && elt->is_vector_lhs)
    DECL_GIMPLE_REG_P (var) = 0;

  DECL_SOURCE_LOCATION (var) = DECL_SOURCE_LOCATION (base);
  DECL_ARTIFICIAL (var) = 1;

  if (TREE_THIS_VOLATILE (elt->type))
    {
      TREE_THIS_VOLATILE (var) = 1;
      TREE_SIDE_EFFECTS (var) = 1;
    }

  if (DECL_NAME (base) && !DECL_IGNORED_P (base))
    {
      char *pretty_name = build_element_name (elt);
      DECL_NAME (var) = get_identifier (pretty_name);
      obstack_free (&sra_obstack, pretty_name);

      SET_DECL_DEBUG_EXPR (var, generate_element_ref (elt));
      DECL_DEBUG_EXPR_IS_FROM (var) = 1;

      DECL_IGNORED_P (var) = 0;
      TREE_NO_WARNING (var) = nowarn;
    }
  else
    {
      DECL_IGNORED_P (var) = 1;
      /* ??? We can't generate any warning that would be meaningful.  */
      TREE_NO_WARNING (var) = 1;
    }

  /* Zero-initialize bit-field scalarization variables, to avoid
     triggering undefined behavior.  */
  if (TREE_CODE (elt->element) == BIT_FIELD_REF
      || (var != elt->replacement
	  && TREE_CODE (elt->replacement) == BIT_FIELD_REF))
    {
      tree init = sra_build_assignment (var, fold_convert (TREE_TYPE (var),
							   integer_zero_node));
      insert_edge_copies (init, ENTRY_BLOCK_PTR);
      mark_all_v_defs (init);
    }

  if (dump_file)
    {
      fputs ("  ", dump_file);
      dump_sra_elt_name (dump_file, elt);
      fputs (" -> ", dump_file);
      print_generic_expr (dump_file, var, dump_flags);
      fputc ('\n', dump_file);
    }
}

/* Make one pass across an element tree deciding whether or not it's
   profitable to instantiate individual leaf scalars.

   PARENT_USES and PARENT_COPIES are the sum of the N_USES and N_COPIES
   fields all the way up the tree.  */

static void
decide_instantiation_1 (struct sra_elt *elt, unsigned int parent_uses,
			unsigned int parent_copies)
{
  if (dump_file && !elt->parent)
    {
      fputs ("Initial instantiation for ", dump_file);
      dump_sra_elt_name (dump_file, elt);
      fputc ('\n', dump_file);
    }

  if (elt->cannot_scalarize)
    return;

  if (elt->is_scalar)
    {
      /* The decision is simple: instantiate if we're used more frequently
	 than the parent needs to be seen as a complete unit.  */
      if (elt->n_uses + elt->n_copies + parent_copies > parent_uses)
	instantiate_element (elt);
    }
  else
    {
      struct sra_elt *c, *group;
      unsigned int this_uses = elt->n_uses + parent_uses;
      unsigned int this_copies = elt->n_copies + parent_copies;

      /* Consider groups of sub-elements as weighing in favour of
	 instantiation whatever their size.  */
      for (group = elt->groups; group ; group = group->sibling)
	FOR_EACH_ACTUAL_CHILD (c, group)
	  {
	    c->n_uses += group->n_uses;
	    c->n_copies += group->n_copies;
	  }

      for (c = elt->children; c ; c = c->sibling)
	decide_instantiation_1 (c, this_uses, this_copies);
    }
}

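/* A worked instance of the test above (counts assumed): if A.X has
   n_uses == 3 and n_copies == 1 while its parent A is used as a whole
   only twice and never copied (parent_uses == 2, parent_copies == 0),
   then 3 + 1 + 0 > 2 and A$x is instantiated.  */
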
/* Compute the size and number of all instantiated elements below ELT.
   We will only care about this if the size of the complete structure
   fits in a HOST_WIDE_INT, so we don't have to worry about overflow.  */

static unsigned int
sum_instantiated_sizes (struct sra_elt *elt, unsigned HOST_WIDE_INT *sizep)
{
  if (elt->replacement)
    {
      *sizep += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (elt->type));
      return 1;
    }
  else
    {
      struct sra_elt *c;
      unsigned int count = 0;

      for (c = elt->children; c ; c = c->sibling)
	count += sum_instantiated_sizes (c, sizep);

      return count;
    }
}

/* Instantiate fields in ELT->TYPE that are not currently present as
   children of ELT.  */

static void instantiate_missing_elements (struct sra_elt *elt);

static struct sra_elt *
instantiate_missing_elements_1 (struct sra_elt *elt, tree child, tree type)
{
  struct sra_elt *sub = lookup_element (elt, child, type, INSERT);

  if (sub->is_scalar)
    {
      if (sub->replacement == NULL)
	instantiate_element (sub);
    }
  else
    instantiate_missing_elements (sub);
  return sub;
}

/* Obtain the canonical type for field F of ELEMENT.  */

static tree
canon_type_for_field (tree f, tree element)
{
  tree field_type = TREE_TYPE (f);

  /* canonicalize_component_ref() unwidens some bit-field types (not
     marked as DECL_BIT_FIELD in C++), so we must do the same, lest we
     may introduce type mismatches.  */
  if (INTEGRAL_TYPE_P (field_type)
      && DECL_MODE (f) != TYPE_MODE (field_type))
    field_type = TREE_TYPE (get_unwidened (build3 (COMPONENT_REF,
						   field_type,
						   element,
						   f, NULL_TREE),
					   NULL_TREE));

  return field_type;
}

/* Look for adjacent fields of ELT starting at F that we'd like to
   scalarize as a single variable.  Return the last field of the
   group.  */

static tree
try_instantiate_multiple_fields (struct sra_elt *elt, tree f)
{
  int count;
  unsigned HOST_WIDE_INT align, bit, size, alchk;
  enum machine_mode mode;
  tree first = f, prev;
  tree type, var;
  struct sra_elt *block;

  if (!is_sra_scalar_type (TREE_TYPE (f))
      || !host_integerp (DECL_FIELD_OFFSET (f), 1)
      || !host_integerp (DECL_FIELD_BIT_OFFSET (f), 1)
      || !host_integerp (DECL_SIZE (f), 1)
      || lookup_element (elt, f, NULL, NO_INSERT))
    return f;

  block = elt;

  /* For complex and array objects, there are going to be integer
     literals as child elements.  In this case, we can't just take the
     alignment and mode of the decl, so we instead rely on the element
     type.

     ??? We could try to infer additional alignment from the full
     object declaration and the location of the sub-elements we're
     accessing.  */
  for (count = 0; !DECL_P (block->element); count++)
    block = block->parent;

  align = DECL_ALIGN (block->element);
  alchk = GET_MODE_BITSIZE (DECL_MODE (block->element));

  if (count)
    {
      type = TREE_TYPE (block->element);
      while (count--)
	type = TREE_TYPE (type);

      align = TYPE_ALIGN (type);
      alchk = GET_MODE_BITSIZE (TYPE_MODE (type));
    }

  if (align < alchk)
    align = alchk;

  /* Coalescing wider fields is probably pointless and
     inefficient.  */
  if (align > BITS_PER_WORD)
    align = BITS_PER_WORD;

  bit = tree_low_cst (DECL_FIELD_OFFSET (f), 1) * BITS_PER_UNIT
    + tree_low_cst (DECL_FIELD_BIT_OFFSET (f), 1);
  size = tree_low_cst (DECL_SIZE (f), 1);

  alchk = align - 1;
  alchk = ~alchk;

  if ((bit & alchk) != ((bit + size - 1) & alchk))
    return f;

  /* Find adjacent fields in the same alignment word.  */

  for (prev = f, f = TREE_CHAIN (f);
       f && TREE_CODE (f) == FIELD_DECL
	 && is_sra_scalar_type (TREE_TYPE (f))
	 && host_integerp (DECL_FIELD_OFFSET (f), 1)
	 && host_integerp (DECL_FIELD_BIT_OFFSET (f), 1)
	 && host_integerp (DECL_SIZE (f), 1)
	 && !lookup_element (elt, f, NULL, NO_INSERT);
       prev = f, f = TREE_CHAIN (f))
    {
      unsigned HOST_WIDE_INT nbit, nsize;

      nbit = tree_low_cst (DECL_FIELD_OFFSET (f), 1) * BITS_PER_UNIT
	+ tree_low_cst (DECL_FIELD_BIT_OFFSET (f), 1);
      nsize = tree_low_cst (DECL_SIZE (f), 1);

      if (bit + size == nbit)
	{
	  if ((bit & alchk) != ((nbit + nsize - 1) & alchk))
	    {
	      /* If we're at an alignment boundary, don't bother
		 growing alignment such that we can include this next
		 field.  */
	      if ((nbit & alchk)
		  || GET_MODE_BITSIZE (DECL_MODE (f)) <= align)
		break;

	      align = GET_MODE_BITSIZE (DECL_MODE (f));
	      alchk = align - 1;
	      alchk = ~alchk;

	      if ((bit & alchk) != ((nbit + nsize - 1) & alchk))
		break;
	    }
	  size += nsize;
	}
      else if (nbit + nsize == bit)
	{
	  if ((nbit & alchk) != ((bit + size - 1) & alchk))
	    {
	      if ((bit & alchk)
		  || GET_MODE_BITSIZE (DECL_MODE (f)) <= align)
		break;

	      align = GET_MODE_BITSIZE (DECL_MODE (f));
	      alchk = align - 1;
	      alchk = ~alchk;

	      if ((nbit & alchk) != ((bit + size - 1) & alchk))
		break;
	    }
	  bit = nbit;
	  size += nsize;
	}
      else
	break;
    }

  f = prev;

  if (f == first)
    return f;

  gcc_assert ((bit & alchk) == ((bit + size - 1) & alchk));

  /* Try to widen the bit range so as to cover padding bits as well.  */

  if ((bit & ~alchk) || size != align)
    {
      unsigned HOST_WIDE_INT mbit = bit & alchk;
      unsigned HOST_WIDE_INT msize = align;

      for (f = TYPE_FIELDS (elt->type);
	   f; f = TREE_CHAIN (f))
	{
	  unsigned HOST_WIDE_INT fbit, fsize;

	  /* Skip the fields from first to prev.  */
	  if (f == first)
	    {
	      f = prev;
	      continue;
	    }

	  if (!(TREE_CODE (f) == FIELD_DECL
		&& host_integerp (DECL_FIELD_OFFSET (f), 1)
		&& host_integerp (DECL_FIELD_BIT_OFFSET (f), 1)))
	    continue;

	  fbit = tree_low_cst (DECL_FIELD_OFFSET (f), 1) * BITS_PER_UNIT
	    + tree_low_cst (DECL_FIELD_BIT_OFFSET (f), 1);

	  /* If we're past the selected word, we're fine.  */
	  if ((bit & alchk) < (fbit & alchk))
	    continue;

	  if (host_integerp (DECL_SIZE (f), 1))
	    fsize = tree_low_cst (DECL_SIZE (f), 1);
	  else
	    /* Assume a variable-sized field takes up all space till
	       the end of the word.  ??? Endianness issues?  */
	    fsize = align - (fbit & alchk);

	  if ((fbit & alchk) < (bit & alchk))
	    {
	      /* A large field might start at a previous word and
		 extend into the selected word.  Exclude those
		 bits.  ??? Endianness issues?  */
	      HOST_WIDE_INT diff = fbit + fsize - mbit;

	      if (diff <= 0)
		continue;

	      mbit += diff;
	      msize -= diff;
	    }
	  else
	    {
	      /* Non-overlapping, great.  */
	      if (fbit + fsize <= mbit
		  || mbit + msize <= fbit)
		continue;

	      if (fbit <= mbit)
		{
		  unsigned HOST_WIDE_INT diff = fbit + fsize - mbit;
		  mbit += diff;
		  msize -= diff;
		}
	      else if (fbit > mbit)
		msize -= (mbit + msize - fbit);
	      else
		gcc_unreachable ();
	    }
	}

      bit = mbit;
      size = msize;
    }

  /* Now we know the bit range we're interested in.  Find the smallest
     machine mode we can use to access it.  */

  for (mode = smallest_mode_for_size (size, MODE_INT);
       ;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      gcc_assert (mode != VOIDmode);

      alchk = GET_MODE_PRECISION (mode) - 1;
      alchk = ~alchk;

      if ((bit & alchk) == ((bit + size - 1) & alchk))
	break;
    }

  gcc_assert (~alchk < align);

  /* Create the field group as a single variable.  */

  /* We used to create a type for the mode above, but size turns out
     not to be the mode's size.  As we need a matching type to build
     a BIT_FIELD_REF, use a nonstandard integer type as fallback.  */
  type = lang_hooks.types.type_for_size (size, 1);
  if (!type || TYPE_PRECISION (type) != size)
    type = build_nonstandard_integer_type (size, 1);

  var = build3 (BIT_FIELD_REF, type, NULL_TREE,
		bitsize_int (size), bitsize_int (bit));

  block = instantiate_missing_elements_1 (elt, var, type);
  gcc_assert (block && block->is_scalar);

  var = block->replacement;

  if ((bit & ~alchk)
      || (HOST_WIDE_INT)size != tree_low_cst (DECL_SIZE (var), 1))
    {
      block->replacement = build3 (BIT_FIELD_REF,
				   TREE_TYPE (block->element), var,
				   bitsize_int (size),
				   bitsize_int (bit & ~alchk));
    }

  block->in_bitfld_block = 2;

  /* Add the member fields to the group, such that they access
     portions of the group variable.  */

  for (f = first; f != TREE_CHAIN (prev); f = TREE_CHAIN (f))
    {
      tree field_type = canon_type_for_field (f, elt->element);
      struct sra_elt *fld = lookup_element (block, f, field_type, INSERT);

      gcc_assert (fld && fld->is_scalar && !fld->replacement);

      fld->replacement = build3 (BIT_FIELD_REF, field_type, var,
				 DECL_SIZE (f),
				 bitsize_int
				 ((TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f))
				   * BITS_PER_UNIT
				   + TREE_INT_CST_LOW
				     (DECL_FIELD_BIT_OFFSET (f)))
				  & ~alchk));
      fld->in_bitfld_block = 1;
    }

  return prev;
}

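/* For illustration (layout assumed): given adjacent bit fields
   "int a : 3; int b : 5; int c : 8;" that fall within one alignment
   word, a single unsigned group variable covering that word is
   created, and A, B and C each become BIT_FIELD_REFs selecting their
   own bits within it.  */
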
static void
instantiate_missing_elements (struct sra_elt *elt)
{
  tree type = elt->type;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	tree f;
	for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      tree last = try_instantiate_multiple_fields (elt, f);

	      if (last != f)
		{
		  f = last;
		  continue;
		}

	      instantiate_missing_elements_1 (elt, f,
					      canon_type_for_field
					      (f, elt->element));
	    }
	break;
      }

    case ARRAY_TYPE:
      {
	tree i, max, subtype;

	i = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	max = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	subtype = TREE_TYPE (type);

	while (1)
	  {
	    instantiate_missing_elements_1 (elt, i, subtype);
	    if (tree_int_cst_equal (i, max))
	      break;
	    i = int_const_binop (PLUS_EXPR, i, integer_one_node, true);
	  }

	break;
      }

    case COMPLEX_TYPE:
      type = TREE_TYPE (type);
      instantiate_missing_elements_1 (elt, integer_zero_node, type);
      instantiate_missing_elements_1 (elt, integer_one_node, type);
      break;

    default:
      gcc_unreachable ();
    }
}

/* Return true if there is only one non-aggregate field in the record, TYPE.
   Return false otherwise.  */

static bool
single_scalar_field_in_record_p (tree type)
{
  int num_fields = 0;
  tree field;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      {
	num_fields++;

	if (num_fields == 2)
	  return false;

	if (AGGREGATE_TYPE_P (TREE_TYPE (field)))
	  return false;
      }

  return true;
}

/* Make one pass across an element tree deciding whether to perform block
   or element copies.  If we decide on element copies, instantiate all
   elements.  Return true if there are any instantiated sub-elements.  */

static bool
decide_block_copy (struct sra_elt *elt)
{
  struct sra_elt *c;
  bool any_inst;

  /* We shouldn't be invoked on groups of sub-elements as they must
     behave like their parent as far as block copy is concerned.  */
  gcc_assert (!elt->is_group);

  /* If scalarization is disabled, respect it.  */
  if (elt->cannot_scalarize)
    {
      elt->use_block_copy = 1;

      if (dump_file)
	{
	  fputs ("Scalarization disabled for ", dump_file);
	  dump_sra_elt_name (dump_file, elt);
	  fputc ('\n', dump_file);
	}

      /* Disable scalarization of sub-elements.  */
      for (c = elt->children; c; c = c->sibling)
	{
	  c->cannot_scalarize = 1;
	  decide_block_copy (c);
	}

      /* Groups behave like their parent.  */
      for (c = elt->groups; c; c = c->sibling)
	{
	  c->cannot_scalarize = 1;
	  c->use_block_copy = 1;
	}

      return false;
    }

  /* Don't decide if we've no uses and no groups.  */
  if (elt->n_uses == 0 && elt->n_copies == 0 && elt->groups == NULL)
    ;

  else if (!elt->is_scalar)
    {
      tree size_tree = TYPE_SIZE_UNIT (elt->type);
      bool use_block_copy = true;

      /* Tradeoffs for COMPLEX types pretty much always make it better
	 to go ahead and split the components.  */
      if (TREE_CODE (elt->type) == COMPLEX_TYPE)
	use_block_copy = false;

      /* Don't bother trying to figure out the rest if the structure is
	 so large we can't do easy arithmetic.  This also forces block
	 copies for variable sized structures.  */
      else if (host_integerp (size_tree, 1))
	{
	  unsigned HOST_WIDE_INT full_size, inst_size = 0;
	  unsigned int max_size, max_count, inst_count, full_count;

	  /* If the sra-max-structure-size parameter is 0, then the
	     user has not overridden the parameter and we can choose a
	     sensible default.  */
	  max_size = SRA_MAX_STRUCTURE_SIZE
		     ? SRA_MAX_STRUCTURE_SIZE
		     : MOVE_RATIO * UNITS_PER_WORD;
	  max_count = SRA_MAX_STRUCTURE_COUNT
		      ? SRA_MAX_STRUCTURE_COUNT
		      : MOVE_RATIO;

	  full_size = tree_low_cst (size_tree, 1);
	  full_count = count_type_elements (elt->type, false);
	  inst_count = sum_instantiated_sizes (elt, &inst_size);

	  /* If there is only one scalar field in the record, don't block copy.  */
	  if (single_scalar_field_in_record_p (elt->type))
	    use_block_copy = false;

	  /* ??? What to do here.  If there are two fields, and we've only
	     instantiated one, then instantiating the other is clearly a win.
	     If there are a large number of fields then the size of the copy
	     is much more of a factor.  */

	  /* If the structure is small, and we've made copies, go ahead
	     and instantiate, hoping that the copies will go away.  */
	  if (full_size <= max_size
	      && (full_count - inst_count) <= max_count
	      && elt->n_copies > elt->n_uses)
	    use_block_copy = false;
	  else if (inst_count * 100 >= full_count * SRA_FIELD_STRUCTURE_RATIO
		   && inst_size * 100 >= full_size * SRA_FIELD_STRUCTURE_RATIO)
	    use_block_copy = false;

	  /* In order to avoid block copy, we have to be able to instantiate
	     all elements of the type.  See if this is possible.  */
	  if (!use_block_copy
	      && (!can_completely_scalarize_p (elt)
		  || !type_can_instantiate_all_elements (elt->type)))
	    use_block_copy = true;
	}

      elt->use_block_copy = use_block_copy;

      /* Groups behave like their parent.  */
      for (c = elt->groups; c; c = c->sibling)
	c->use_block_copy = use_block_copy;

      if (dump_file)
	{
	  fprintf (dump_file, "Using %s for ",
		   use_block_copy ? "block-copy" : "element-copy");
	  dump_sra_elt_name (dump_file, elt);
	  fputc ('\n', dump_file);
	}

      if (!use_block_copy)
	{
	  instantiate_missing_elements (elt);
	  return true;
	}
    }

  any_inst = elt->replacement != NULL;

  for (c = elt->children; c ; c = c->sibling)
    any_inst |= decide_block_copy (c);

  return any_inst;
}

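/* A worked instance of the defaults above (all numbers assumed, since
   MOVE_RATIO, UNITS_PER_WORD and the params are target- and
   user-dependent): with MOVE_RATIO 4 and UNITS_PER_WORD 8, max_size
   defaults to 32 bytes and max_count to 4; with a field-structure
   ratio of 75, element copy is also chosen once at least 75% of the
   elements, and 75% of the bytes, already have replacements.  */
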
/* Entry point to phase 3.  Instantiate scalar replacement variables.  */

static void
decide_instantiations (void)
{
  unsigned int i;
  bool cleared_any;
  bitmap_head done_head;
  bitmap_iterator bi;

  /* We cannot clear bits from a bitmap we're iterating over,
     so save up all the bits to clear until the end.  */
  bitmap_initialize (&done_head, &bitmap_default_obstack);
  cleared_any = false;

  EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
    {
      tree var = referenced_var (i);
      struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
      if (elt)
	{
	  decide_instantiation_1 (elt, 0, 0);
	  if (!decide_block_copy (elt))
	    elt = NULL;
	}
      if (!elt)
	{
	  bitmap_set_bit (&done_head, i);
	  cleared_any = true;
	}
    }

  if (cleared_any)
    {
      bitmap_and_compl_into (sra_candidates, &done_head);
      bitmap_and_compl_into (needs_copy_in, &done_head);
    }
  bitmap_clear (&done_head);

  mark_set_for_renaming (sra_candidates);

  if (dump_file)
    fputc ('\n', dump_file);
}

/* Phase Four: Update the function to match the replacements created.  */

/* Mark all the variables in VDEF/VUSE operators for STMT for
   renaming.  This becomes necessary when we modify all of a
   non-scalar.  */

static void
mark_all_v_defs_1 (tree stmt)
{
  tree sym;
  ssa_op_iter iter;

  update_stmt_if_modified (stmt);

  FOR_EACH_SSA_TREE_OPERAND (sym, stmt, iter, SSA_OP_ALL_VIRTUALS)
    {
      if (TREE_CODE (sym) == SSA_NAME)
	sym = SSA_NAME_VAR (sym);
      mark_sym_for_renaming (sym);
    }
}

/* Mark all the variables in virtual operands in all the statements in
   LIST for renaming.  */

static void
mark_all_v_defs (tree list)
{
  if (TREE_CODE (list) != STATEMENT_LIST)
    mark_all_v_defs_1 (list);
  else
    {
      tree_stmt_iterator i;
      for (i = tsi_start (list); !tsi_end_p (i); tsi_next (&i))
	mark_all_v_defs_1 (tsi_stmt (i));
    }
}

/* Mark every replacement under ELT with TREE_NO_WARNING.  */

static void
mark_no_warning (struct sra_elt *elt)
{
  if (!elt->all_no_warning)
    {
      if (elt->replacement)
	TREE_NO_WARNING (elt->replacement) = 1;
      else
	{
	  struct sra_elt *c;
	  FOR_EACH_ACTUAL_CHILD (c, elt)
	    mark_no_warning (c);
	}
      elt->all_no_warning = true;
    }
}

/* Build a single level component reference to ELT rooted at BASE.  */

static tree
generate_one_element_ref (struct sra_elt *elt, tree base)
{
  switch (TREE_CODE (TREE_TYPE (base)))
    {
    case RECORD_TYPE:
      {
	tree field = elt->element;

	/* We can't test elt->in_bitfld_block here because, when this is
	   called from instantiate_element, we haven't set this field
	   yet.  */
	if (TREE_CODE (field) == BIT_FIELD_REF)
	  {
	    tree ret = unshare_expr (field);
	    TREE_OPERAND (ret, 0) = base;
	    return ret;
	  }

	/* Watch out for compatible records with differing field lists.  */
	if (DECL_FIELD_CONTEXT (field) != TYPE_MAIN_VARIANT (TREE_TYPE (base)))
	  field = find_compatible_field (TREE_TYPE (base), field);

	return build3 (COMPONENT_REF, elt->type, base, field, NULL);
      }

    case ARRAY_TYPE:
      if (TREE_CODE (elt->element) == RANGE_EXPR)
	return build4 (ARRAY_RANGE_REF, elt->type, base,
		       TREE_OPERAND (elt->element, 0), NULL, NULL);
      else
	return build4 (ARRAY_REF, elt->type, base, elt->element, NULL, NULL);

    case COMPLEX_TYPE:
      if (elt->element == integer_zero_node)
	return build1 (REALPART_EXPR, elt->type, base);
      else
	return build1 (IMAGPART_EXPR, elt->type, base);

    default:
      gcc_unreachable ();
    }
}

/* Build a full component reference to ELT rooted at its native variable.  */

static tree
generate_element_ref (struct sra_elt *elt)
{
  if (elt->parent)
    return generate_one_element_ref (elt, generate_element_ref (elt->parent));
  else
    return elt->element;
}

/* Return true if BF is a bit-field that we can handle like a scalar.  */

static bool
scalar_bitfield_p (tree bf)
{
  return (TREE_CODE (bf) == BIT_FIELD_REF
	  && (is_gimple_reg (TREE_OPERAND (bf, 0))
	      || (TYPE_MODE (TREE_TYPE (TREE_OPERAND (bf, 0))) != BLKmode
		  && (!TREE_SIDE_EFFECTS (TREE_OPERAND (bf, 0))
		      || (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE
						       (TREE_OPERAND (bf, 0))))
			  <= BITS_PER_WORD)))));
}

/* Create an assignment statement from SRC to DST.  */

static tree
sra_build_assignment (tree dst, tree src)
{
  /* Turning BIT_FIELD_REFs into bit operations enables other passes
     to do a much better job at optimizing the code.
     From dst = BIT_FIELD_REF <var, sz, off> we produce

	SR.1 = (scalar type) var;
	SR.2 = SR.1 >> off;
	SR.3 = SR.2 & ((1 << sz) - 1);
	... possible sign extension of SR.3 ...
	dst = (destination type) SR.3;  */
  if (scalar_bitfield_p (src))
    {
      tree var, shift, width;
      tree utype, stype, stmp, utmp, dtmp;
      tree list, stmt;
      bool unsignedp = (INTEGRAL_TYPE_P (TREE_TYPE (src))
			? TYPE_UNSIGNED (TREE_TYPE (src)) : true);

      var = TREE_OPERAND (src, 0);
      width = TREE_OPERAND (src, 1);
      /* The offset needs to be adjusted to a right shift quantity
	 depending on the endianness.  */
      if (BYTES_BIG_ENDIAN)
	{
	  tree tmp = size_binop (PLUS_EXPR, width, TREE_OPERAND (src, 2));
	  shift = size_binop (MINUS_EXPR, TYPE_SIZE (TREE_TYPE (var)), tmp);
	}
      else
	shift = TREE_OPERAND (src, 2);

      /* In weird cases we have non-integral types for the source or
	 destination object.
	 ??? For unknown reasons we also want an unsigned scalar type.  */
      stype = TREE_TYPE (var);
      if (!INTEGRAL_TYPE_P (stype))
	stype = lang_hooks.types.type_for_size (TREE_INT_CST_LOW
						(TYPE_SIZE (stype)), 1);
      else if (!TYPE_UNSIGNED (stype))
	stype = unsigned_type_for (stype);

      utype = TREE_TYPE (dst);
      if (!INTEGRAL_TYPE_P (utype))
	utype = lang_hooks.types.type_for_size (TREE_INT_CST_LOW
						(TYPE_SIZE (utype)), 1);
      else if (!TYPE_UNSIGNED (utype))
	utype = unsigned_type_for (utype);

      list = NULL;
      stmp = make_rename_temp (stype, "SR");

      /* Convert the base var of the BIT_FIELD_REF to the scalar type
	 we use for computation if we cannot use it directly.  */
      if (!useless_type_conversion_p (stype, TREE_TYPE (var)))
	{
	  if (INTEGRAL_TYPE_P (TREE_TYPE (var)))
	    stmt = build_gimple_modify_stmt (stmp,
					     fold_convert (stype, var));
	  else
	    stmt = build_gimple_modify_stmt (stmp,
					     fold_build1 (VIEW_CONVERT_EXPR,
							  stype, var));
	  append_to_statement_list (stmt, &list);
	  var = stmp;
	}

      if (!integer_zerop (shift))
	{
	  stmt = build_gimple_modify_stmt (stmp,
					   fold_build2 (RSHIFT_EXPR, stype,
							var, shift));
	  append_to_statement_list (stmt, &list);
	  var = stmp;
	}

      /* If we need a masking operation, produce one.  */
      if (TREE_INT_CST_LOW (width) == TYPE_PRECISION (stype))
	unsignedp = true;
      else
	{
	  tree one = build_int_cst_wide (stype, 1, 0);
	  tree mask = int_const_binop (LSHIFT_EXPR, one, width, 0);
	  mask = int_const_binop (MINUS_EXPR, mask, one, 0);

	  stmt = build_gimple_modify_stmt (stmp,
					   fold_build2 (BIT_AND_EXPR, stype,
							var, mask));
	  append_to_statement_list (stmt, &list);
	  var = stmp;
	}

      /* After shifting and masking, convert to the target type.  */
      utmp = stmp;
      if (!useless_type_conversion_p (utype, stype))
	{
	  utmp = make_rename_temp (utype, "SR");

	  stmt = build_gimple_modify_stmt (utmp, fold_convert (utype, var));
	  append_to_statement_list (stmt, &list);

	  var = utmp;
	}

      /* Perform sign extension, if required.
	 ??? This should never be necessary.  */
      if (!unsignedp)
	{
	  tree signbit = int_const_binop (LSHIFT_EXPR,
					  build_int_cst_wide (utype, 1, 0),
					  size_binop (MINUS_EXPR, width,
						      bitsize_int (1)), 0);

	  stmt = build_gimple_modify_stmt (utmp,
					   fold_build2 (BIT_XOR_EXPR, utype,
							var, signbit));
	  append_to_statement_list (stmt, &list);

	  stmt = build_gimple_modify_stmt (utmp,
					   fold_build2 (MINUS_EXPR, utype,
							utmp, signbit));
	  append_to_statement_list (stmt, &list);

	  var = utmp;
	}

      /* Finally, move and convert to the destination.  */
      if (!useless_type_conversion_p (TREE_TYPE (dst), TREE_TYPE (var)))
	{
	  if (INTEGRAL_TYPE_P (TREE_TYPE (dst)))
	    var = fold_convert (TREE_TYPE (dst), var);
	  else
	    var = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (dst), var);

	  /* If the destination is not a register the conversion needs
	     to be a separate statement.  */
	  if (!is_gimple_reg (dst))
	    {
	      dtmp = make_rename_temp (TREE_TYPE (dst), "SR");
	      stmt = build_gimple_modify_stmt (dtmp, var);
	      append_to_statement_list (stmt, &list);
	      var = dtmp;
	    }
	}
      stmt = build_gimple_modify_stmt (dst, var);
      append_to_statement_list (stmt, &list);

      return list;
    }

  /* It was hoped that we could perform some type sanity checking
     here, but since front-ends can emit accesses of fields in types
     different from their nominal types and copy structures containing
     them as a whole, we'd have to handle such differences here.
     Since such accesses under different types require compatibility
     anyway, there's little point in making tests and/or adding
     conversions to ensure the types of src and dst are the same.
     So we just assume type differences at this point are ok.
     The only exception we make here are pointer types, which can be different
     in e.g. structurally equal, but non-identical RECORD_TYPEs.  */
  if (POINTER_TYPE_P (TREE_TYPE (dst))
      && !useless_type_conversion_p (TREE_TYPE (dst), TREE_TYPE (src)))
    src = fold_convert (TREE_TYPE (dst), src);

  return build_gimple_modify_stmt (dst, src);
}

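/* A worked instance of the expansion above (layout assumed:
   little-endian, 32-bit VAR, 8-bit field at bit 16), i.e.
   dst = BIT_FIELD_REF <var, 8, 16>:

     SR.1 = (unsigned int) var;
     SR.2 = SR.1 >> 16;
     SR.3 = SR.2 & 255;
     dst = (destination type) SR.3;  */
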
2304 /* BIT_FIELD_REFs must not be shared. sra_build_elt_assignment()
2305 takes care of assignments, but we must create copies for uses. */
2306 #define REPLDUP(t) (TREE_CODE (t) != BIT_FIELD_REF ? (t) : unshare_expr (t))
/* Emit an assignment from SRC to DST, but if DST is a scalarizable
   BIT_FIELD_REF, turn it into bit operations.  */

static tree
sra_build_bf_assignment (tree dst, tree src)
{
  tree var, type, utype, tmp, tmp2, tmp3;
  tree list, stmt;
  tree cst, cst2, mask;
  tree minshift, maxshift;

  if (TREE_CODE (dst) != BIT_FIELD_REF)
    return sra_build_assignment (dst, src);

  var = TREE_OPERAND (dst, 0);

  if (!scalar_bitfield_p (dst))
    return sra_build_assignment (REPLDUP (dst), src);

  list = NULL;

  cst = fold_convert (bitsizetype, TREE_OPERAND (dst, 2));
  cst2 = size_binop (PLUS_EXPR,
		     fold_convert (bitsizetype, TREE_OPERAND (dst, 1)),
		     cst);

  if (BYTES_BIG_ENDIAN)
    {
      maxshift = size_binop (MINUS_EXPR, TYPE_SIZE (TREE_TYPE (var)), cst);
      minshift = size_binop (MINUS_EXPR, TYPE_SIZE (TREE_TYPE (var)), cst2);
    }
  else
    {
      maxshift = cst2;
      minshift = cst;
    }

  type = TREE_TYPE (var);
  if (!INTEGRAL_TYPE_P (type))
    type = lang_hooks.types.type_for_size
      (TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (var))), 1);
  if (TYPE_UNSIGNED (type))
    utype = type;
  else
    utype = unsigned_type_for (type);

  mask = build_int_cst_wide (utype, 1, 0);
  if (TREE_INT_CST_LOW (maxshift) == TYPE_PRECISION (utype))
    cst = build_int_cst_wide (utype, 0, 0);
  else
    cst = int_const_binop (LSHIFT_EXPR, mask, maxshift, true);
  if (integer_zerop (minshift))
    cst2 = mask;
  else
    cst2 = int_const_binop (LSHIFT_EXPR, mask, minshift, true);
  mask = int_const_binop (MINUS_EXPR, cst, cst2, true);
  mask = fold_build1 (BIT_NOT_EXPR, utype, mask);

  if (TYPE_MAIN_VARIANT (utype) != TYPE_MAIN_VARIANT (TREE_TYPE (var))
      && !integer_zerop (mask))
    {
      tmp = var;
      if (!is_gimple_variable (tmp))
	tmp = unshare_expr (var);

      tmp2 = make_rename_temp (utype, "SR");

      if (INTEGRAL_TYPE_P (TREE_TYPE (var)))
	stmt = build_gimple_modify_stmt (tmp2, fold_convert (utype, tmp));
      else
	stmt = build_gimple_modify_stmt (tmp2, fold_build1 (VIEW_CONVERT_EXPR,
							    utype, tmp));
      append_to_statement_list (stmt, &list);
    }
  else
    tmp2 = var;

  if (!integer_zerop (mask))
    {
      tmp = make_rename_temp (utype, "SR");
      stmt = build_gimple_modify_stmt (tmp,
				       fold_build2 (BIT_AND_EXPR, utype,
						    tmp2, mask));
      append_to_statement_list (stmt, &list);
    }
  else
    tmp = mask;

  if (is_gimple_reg (src) && INTEGRAL_TYPE_P (TREE_TYPE (src)))
    tmp2 = src;
  else if (INTEGRAL_TYPE_P (TREE_TYPE (src)))
    {
      tmp2 = make_rename_temp (TREE_TYPE (src), "SR");
      stmt = sra_build_assignment (tmp2, src);
      append_to_statement_list (stmt, &list);
    }
  else
    {
      tmp2 = make_rename_temp
	(lang_hooks.types.type_for_size
	 (TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (src))),
	  1), "SR");
      stmt = sra_build_assignment (tmp2, fold_build1 (VIEW_CONVERT_EXPR,
						      TREE_TYPE (tmp2), src));
      append_to_statement_list (stmt, &list);
    }

  if (!TYPE_UNSIGNED (TREE_TYPE (tmp2)))
    {
      tree ut = unsigned_type_for (TREE_TYPE (tmp2));
      tmp3 = make_rename_temp (ut, "SR");
      tmp2 = fold_convert (ut, tmp2);
      stmt = sra_build_assignment (tmp3, tmp2);
      append_to_statement_list (stmt, &list);

      tmp2 = fold_build1 (BIT_NOT_EXPR, utype, mask);
      tmp2 = int_const_binop (RSHIFT_EXPR, tmp2, minshift, true);
      tmp2 = fold_convert (ut, tmp2);
      tmp2 = fold_build2 (BIT_AND_EXPR, ut, tmp3, tmp2);

      if (tmp3 != tmp2)
	{
	  tmp3 = make_rename_temp (ut, "SR");
	  stmt = sra_build_assignment (tmp3, tmp2);
	  append_to_statement_list (stmt, &list);
	}

      tmp2 = tmp3;
    }

  if (TYPE_MAIN_VARIANT (TREE_TYPE (tmp2)) != TYPE_MAIN_VARIANT (utype))
    {
      tmp3 = make_rename_temp (utype, "SR");
      tmp2 = fold_convert (utype, tmp2);
      stmt = sra_build_assignment (tmp3, tmp2);
      append_to_statement_list (stmt, &list);

      tmp2 = tmp3;
    }

  if (!integer_zerop (minshift))
    {
      tmp3 = make_rename_temp (utype, "SR");
      stmt = build_gimple_modify_stmt (tmp3,
				       fold_build2 (LSHIFT_EXPR, utype,
						    tmp2, minshift));
      append_to_statement_list (stmt, &list);

      tmp2 = tmp3;
    }

  if (utype != TREE_TYPE (var))
    tmp3 = make_rename_temp (utype, "SR");
  else
    tmp3 = var;
  stmt = build_gimple_modify_stmt (tmp3,
				   fold_build2 (BIT_IOR_EXPR, utype,
						tmp, tmp2));
  append_to_statement_list (stmt, &list);

  if (tmp3 != var)
    {
      if (TREE_TYPE (var) == type)
	stmt = build_gimple_modify_stmt (var,
					 fold_convert (type, tmp3));
      else
	stmt = build_gimple_modify_stmt (var,
					 fold_build1 (VIEW_CONVERT_EXPR,
						      TREE_TYPE (var), tmp3));
      append_to_statement_list (stmt, &list);
    }

  return list;
}
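/* For illustration only (hypothetical example, not part of the
   original sources): storing into a scalarizable bit-field region
   such as

       BIT_FIELD_REF <var, 8, 8> = src;

   is expanded by sra_build_bf_assignment into a mask-and-merge
   sequence roughly like

       SR.1 = var & MASK;	-- MASK has zeros in the field's bits
       SR.2 = (utype) src;
       SR.3 = SR.2 << 8;	-- minshift
       var  = SR.1 | SR.3;  */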
/* Expand an assignment of SRC to the scalarized representation of
   ELT.  If it is a field group, try to widen the assignment to cover
   the full variable.  */

static tree
sra_build_elt_assignment (struct sra_elt *elt, tree src)
{
  tree dst = elt->replacement;
  tree var, tmp, cst, cst2, list, stmt;

  if (TREE_CODE (dst) != BIT_FIELD_REF
      || !elt->in_bitfld_block)
    return sra_build_assignment (REPLDUP (dst), src);

  var = TREE_OPERAND (dst, 0);

  /* Try to widen the assignment to the entire variable.
     We need the source to be a BIT_FIELD_REF as well, such that, for
     BIT_FIELD_REF<d,sz,dp> = BIT_FIELD_REF<s,sz,sp>,
     by design, conditions are met such that we can turn it into
     d = BIT_FIELD_REF<s,dw,sp-dp>.  */
  if (elt->in_bitfld_block == 2
      && TREE_CODE (src) == BIT_FIELD_REF)
    {
      cst = TYPE_SIZE (TREE_TYPE (var));
      cst2 = size_binop (MINUS_EXPR, TREE_OPERAND (src, 2),
			 TREE_OPERAND (dst, 2));

      src = TREE_OPERAND (src, 0);

      /* Avoid full-width bit-fields.  */
      if (integer_zerop (cst2)
	  && tree_int_cst_equal (cst, TYPE_SIZE (TREE_TYPE (src))))
	{
	  if (INTEGRAL_TYPE_P (TREE_TYPE (src))
	      && !TYPE_UNSIGNED (TREE_TYPE (src)))
	    src = fold_convert (unsigned_type_for (TREE_TYPE (src)), src);

	  /* If a single conversion won't do, we'll need a statement
	     list.  */
	  if (TYPE_MAIN_VARIANT (TREE_TYPE (var))
	      != TYPE_MAIN_VARIANT (TREE_TYPE (src)))
	    {
	      list = NULL;

	      if (!INTEGRAL_TYPE_P (TREE_TYPE (src)))
		src = fold_build1 (VIEW_CONVERT_EXPR,
				   lang_hooks.types.type_for_size
				   (TREE_INT_CST_LOW
				    (TYPE_SIZE (TREE_TYPE (src))),
				    1), src);
	      gcc_assert (TYPE_UNSIGNED (TREE_TYPE (src)));

	      tmp = make_rename_temp (TREE_TYPE (src), "SR");
	      stmt = build_gimple_modify_stmt (tmp, src);
	      append_to_statement_list (stmt, &list);

	      stmt = sra_build_assignment (var,
					   fold_convert (TREE_TYPE (var),
							 tmp));
	      append_to_statement_list (stmt, &list);

	      return list;
	    }

	  src = fold_convert (TREE_TYPE (var), src);
	}
      else
	{
	  tmp = fold_build3 (BIT_FIELD_REF, TREE_TYPE (var), src,
			     cst, cst2);
	  src = fold_convert (TREE_TYPE (var), tmp);
	}

      return sra_build_assignment (var, src);
    }

  return sra_build_bf_assignment (dst, src);
}
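/* For illustration only (hypothetical example, not part of the
   original sources): when the field group occupies all of the
   replacement variable D and the source is a same-sized group of
   another variable, an assignment such as

       BIT_FIELD_REF <d, 32, 0> = BIT_FIELD_REF <s, 32, 32>;

   is widened into the single assignment

       d = BIT_FIELD_REF <s, 32, 32>;

   instead of being expanded into shift-and-mask operations.  */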
/* Generate a set of assignment statements in *LIST_P to copy all
   instantiated elements under ELT to or from the equivalent structure
   rooted at EXPR.  COPY_OUT controls the direction of the copy, with
   true meaning to copy out of EXPR into ELT.  */

static void
generate_copy_inout (struct sra_elt *elt, bool copy_out, tree expr,
		     tree *list_p)
{
  struct sra_elt *c;
  tree t;

  if (!copy_out && TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == COMPLEX_TYPE)
    {
      tree r, i;

      c = lookup_element (elt, integer_zero_node, NULL, NO_INSERT);
      r = c->replacement;
      c = lookup_element (elt, integer_one_node, NULL, NO_INSERT);
      i = c->replacement;

      t = build2 (COMPLEX_EXPR, elt->type, r, i);
      t = sra_build_bf_assignment (expr, t);
      SSA_NAME_DEF_STMT (expr) = t;
      append_to_statement_list (t, list_p);
    }
  else if (elt->replacement)
    {
      if (copy_out)
	t = sra_build_elt_assignment (elt, expr);
      else
	t = sra_build_bf_assignment (expr, REPLDUP (elt->replacement));
      append_to_statement_list (t, list_p);
    }
  else
    {
      FOR_EACH_ACTUAL_CHILD (c, elt)
	{
	  t = generate_one_element_ref (c, unshare_expr (expr));
	  generate_copy_inout (c, copy_out, t, list_p);
	}
    }
}
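/* For illustration only (hypothetical names): for a complex SSA name
   c_1 whose real and imaginary parts were instantiated as the scalar
   replacements c$real and c$imag, copying out of the element tree
   (COPY_OUT false) produces the single statement

       c_1 = COMPLEX_EXPR <c$real, c$imag>;

   while for an aggregate the function recurses and emits one
   assignment per instantiated leaf element.  */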
/* Generate a set of assignment statements in *LIST_P to copy all instantiated
   elements under SRC to their counterparts under DST.  There must be a 1-1
   correspondence of instantiated elements.  */

static void
generate_element_copy (struct sra_elt *dst, struct sra_elt *src, tree *list_p)
{
  struct sra_elt *dc, *sc;

  FOR_EACH_ACTUAL_CHILD (dc, dst)
    {
      sc = lookup_element (src, dc->element, NULL, NO_INSERT);
      if (!sc && dc->in_bitfld_block == 2)
	{
	  struct sra_elt *dcs;

	  FOR_EACH_ACTUAL_CHILD (dcs, dc)
	    {
	      sc = lookup_element (src, dcs->element, NULL, NO_INSERT);
	      gcc_assert (sc);
	      generate_element_copy (dcs, sc, list_p);
	    }

	  continue;
	}

      /* If DST and SRC are structs with the same elements, but do not have
	 the same TYPE_MAIN_VARIANT, then lookup of DST FIELD_DECL in SRC
	 will fail.  Try harder by finding the corresponding FIELD_DECL
	 in SRC.  */
      if (!sc)
	{
	  tree f;

	  gcc_assert (useless_type_conversion_p (dst->type, src->type));
	  gcc_assert (TREE_CODE (dc->element) == FIELD_DECL);
	  for (f = TYPE_FIELDS (src->type); f ; f = TREE_CHAIN (f))
	    if (simple_cst_equal (DECL_FIELD_OFFSET (f),
				  DECL_FIELD_OFFSET (dc->element)) > 0
		&& simple_cst_equal (DECL_FIELD_BIT_OFFSET (f),
				     DECL_FIELD_BIT_OFFSET (dc->element)) > 0
		&& simple_cst_equal (DECL_SIZE (f),
				     DECL_SIZE (dc->element)) > 0
		&& (useless_type_conversion_p (TREE_TYPE (dc->element),
					       TREE_TYPE (f))
		    || (POINTER_TYPE_P (TREE_TYPE (dc->element))
			&& POINTER_TYPE_P (TREE_TYPE (f)))))
	      break;
	  gcc_assert (f != NULL_TREE);
	  sc = lookup_element (src, f, NULL, NO_INSERT);
	}

      generate_element_copy (dc, sc, list_p);
    }

  if (dst->replacement)
    {
      tree t;

      gcc_assert (src->replacement);

      t = sra_build_elt_assignment (dst, REPLDUP (src->replacement));
      append_to_statement_list (t, list_p);
    }
}
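/* For illustration only (hypothetical example): given two fully
   instantiated structures

       struct { int a; int b; } x, y;

   the block copy "x = y" is rewritten as element-wise assignments of
   the replacement variables, roughly

       x$a = y$a;
       x$b = y$b;  */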
/* Generate a set of assignment statements in *LIST_P to zero all instantiated
   elements under ELT.  In addition, do not assign to elements that have been
   marked VISITED but do reset the visited flag; this allows easy coordination
   with generate_element_init.  */

static void
generate_element_zero (struct sra_elt *elt, tree *list_p)
{
  struct sra_elt *c;

  if (elt->visited)
    {
      elt->visited = false;
      return;
    }

  if (!elt->in_bitfld_block)
    FOR_EACH_ACTUAL_CHILD (c, elt)
      generate_element_zero (c, list_p);

  if (elt->replacement)
    {
      tree t;

      gcc_assert (elt->is_scalar);
      t = fold_convert (elt->type, integer_zero_node);

      t = sra_build_elt_assignment (elt, t);
      append_to_statement_list (t, list_p);
    }
}
/* Generate an assignment VAR = INIT, where INIT may need gimplification.
   Add the result to *LIST_P.  */

static void
generate_one_element_init (struct sra_elt *elt, tree init, tree *list_p)
{
  /* The replacement can be almost arbitrarily complex.  Gimplify.  */
  tree stmt = sra_build_elt_assignment (elt, init);
  gimplify_and_add (stmt, list_p);
}
/* Generate a set of assignment statements in *LIST_P to set all instantiated
   elements under ELT with the contents of the initializer INIT.  In addition,
   mark all assigned elements VISITED; this allows easy coordination with
   generate_element_zero.  Return false if we found a case we couldn't
   handle.  */

static bool
generate_element_init_1 (struct sra_elt *elt, tree init, tree *list_p)
{
  bool result = true;
  enum tree_code init_code;
  struct sra_elt *sub;
  tree t;
  unsigned HOST_WIDE_INT idx;
  tree value, purpose;

  /* We can be passed DECL_INITIAL of a static variable.  It might have a
     conversion, which we strip off here.  */
  STRIP_USELESS_TYPE_CONVERSION (init);
  init_code = TREE_CODE (init);

  if (elt->is_scalar)
    {
      if (elt->replacement)
	{
	  generate_one_element_init (elt, init, list_p);
	  elt->visited = true;
	}
      return result;
    }

  switch (init_code)
    {
    case COMPLEX_CST:
    case COMPLEX_EXPR:
      FOR_EACH_ACTUAL_CHILD (sub, elt)
	{
	  if (sub->element == integer_zero_node)
	    t = (init_code == COMPLEX_EXPR
		 ? TREE_OPERAND (init, 0) : TREE_REALPART (init));
	  else
	    t = (init_code == COMPLEX_EXPR
		 ? TREE_OPERAND (init, 1) : TREE_IMAGPART (init));
	  result &= generate_element_init_1 (sub, t, list_p);
	}
      break;

    case CONSTRUCTOR:
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), idx, purpose, value)
	{
	  if (TREE_CODE (purpose) == RANGE_EXPR)
	    {
	      tree lower = TREE_OPERAND (purpose, 0);
	      tree upper = TREE_OPERAND (purpose, 1);

	      while (1)
		{
		  sub = lookup_element (elt, lower, NULL, NO_INSERT);
		  if (sub != NULL)
		    result &= generate_element_init_1 (sub, value, list_p);
		  if (tree_int_cst_equal (lower, upper))
		    break;
		  lower = int_const_binop (PLUS_EXPR, lower,
					   integer_one_node, true);
		}
	    }
	  else
	    {
	      sub = lookup_element (elt, purpose, NULL, NO_INSERT);
	      if (sub != NULL)
		result &= generate_element_init_1 (sub, value, list_p);
	    }
	}
      break;

    default:
      elt->visited = true;
      result = false;
    }

  return result;
}
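/* For illustration only (hypothetical example): a CONSTRUCTOR such as

       struct { int a; int b; } x = { 1, 2 };

   walks each (purpose, value) pair and emits "x$a = 1; x$b = 2;",
   while a RANGE_EXPR purpose such as [0 ... 3] in an array
   initializer loops from LOWER to UPPER, initializing one
   sub-element per index.  */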
/* A wrapper function for generate_element_init_1 that handles cleanup after
   gimplification.  */

static bool
generate_element_init (struct sra_elt *elt, tree init, tree *list_p)
{
  bool ret;

  push_gimplify_context ();
  ret = generate_element_init_1 (elt, init, list_p);
  pop_gimplify_context (NULL);

  /* The replacement can expose previously unreferenced variables.  */
  if (ret && *list_p)
    {
      tree_stmt_iterator i;

      for (i = tsi_start (*list_p); !tsi_end_p (i); tsi_next (&i))
	find_new_referenced_vars (tsi_stmt_ptr (i));
    }

  return ret;
}
/* Insert STMT on all the outgoing edges out of BB.  Note that if BB
   has more than one edge, STMT will be replicated for each edge.  Also,
   abnormal edges will be ignored.  */

static void
insert_edge_copies (tree stmt, basic_block bb)
{
  edge e;
  edge_iterator ei;
  bool first_copy;

  first_copy = true;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      /* We don't need to insert copies on abnormal edges.  The
	 value of the scalar replacement is not guaranteed to
	 be valid through an abnormal edge.  */
      if (!(e->flags & EDGE_ABNORMAL))
	{
	  if (first_copy)
	    {
	      bsi_insert_on_edge (e, stmt);
	      first_copy = false;
	    }
	  else
	    bsi_insert_on_edge (e, unsave_expr_now (stmt));
	}
    }
}
/* Helper function to insert LIST before BSI, and set up line number info.  */

static void
sra_insert_before (block_stmt_iterator *bsi, tree list)
{
  tree stmt = bsi_stmt (*bsi);

  if (EXPR_HAS_LOCATION (stmt))
    annotate_all_with_locus (&list, EXPR_LOCATION (stmt));
  bsi_insert_before (bsi, list, BSI_SAME_STMT);
}

/* Similarly, but insert after BSI.  Handles insertion onto edges as well.  */

static void
sra_insert_after (block_stmt_iterator *bsi, tree list)
{
  tree stmt = bsi_stmt (*bsi);

  if (EXPR_HAS_LOCATION (stmt))
    annotate_all_with_locus (&list, EXPR_LOCATION (stmt));

  if (stmt_ends_bb_p (stmt))
    insert_edge_copies (list, bsi->bb);
  else
    bsi_insert_after (bsi, list, BSI_SAME_STMT);
}

/* Similarly, but replace the statement at BSI.  */

static void
sra_replace (block_stmt_iterator *bsi, tree list)
{
  sra_insert_before (bsi, list);
  bsi_remove (bsi, false);
  if (bsi_end_p (*bsi))
    *bsi = bsi_last (bsi->bb);
  else
    bsi_prev (bsi);
}
/* Data structure that bitfield_overlaps_p fills in with information
   about the element passed in and how much of it overlaps with the
   bit-range passed to it.  */

struct bitfield_overlap_info
{
  /* The bit-length of an element.  */
  tree field_len;

  /* The bit-position of the element in its parent.  */
  tree field_pos;

  /* The number of bits of the element that overlap with the incoming
     bit range.  */
  tree overlap_len;

  /* The first bit of the element that overlaps with the incoming bit
     range.  */
  tree overlap_pos;
};
/* Return true if a BIT_FIELD_REF<(FLD->parent), BLEN, BPOS>
   expression (referenced as BF below) accesses any of the bits in FLD,
   false if it doesn't.  If DATA is non-null, its field_len and
   field_pos are filled in such that BIT_FIELD_REF<(FLD->parent),
   field_len, field_pos> (referenced as BFLD below) represents the
   entire field FLD->element, and BIT_FIELD_REF<BFLD, overlap_len,
   overlap_pos> represents the portion of the entire field that
   overlaps with BF.  */

static bool
bitfield_overlaps_p (tree blen, tree bpos, struct sra_elt *fld,
		     struct bitfield_overlap_info *data)
{
  tree flen, fpos;
  bool ret;

  if (TREE_CODE (fld->element) == FIELD_DECL)
    {
      flen = fold_convert (bitsizetype, DECL_SIZE (fld->element));
      fpos = fold_convert (bitsizetype, DECL_FIELD_OFFSET (fld->element));
      fpos = size_binop (MULT_EXPR, fpos, bitsize_int (BITS_PER_UNIT));
      fpos = size_binop (PLUS_EXPR, fpos, DECL_FIELD_BIT_OFFSET (fld->element));
    }
  else if (TREE_CODE (fld->element) == BIT_FIELD_REF)
    {
      flen = fold_convert (bitsizetype, TREE_OPERAND (fld->element, 1));
      fpos = fold_convert (bitsizetype, TREE_OPERAND (fld->element, 2));
    }
  else if (TREE_CODE (fld->element) == INTEGER_CST)
    {
      flen = fold_convert (bitsizetype, TYPE_SIZE (fld->type));
      fpos = fold_convert (bitsizetype, fld->element);
      fpos = size_binop (MULT_EXPR, flen, fpos);
    }
  else
    gcc_unreachable ();

  gcc_assert (host_integerp (blen, 1)
	      && host_integerp (bpos, 1)
	      && host_integerp (flen, 1)
	      && host_integerp (fpos, 1));

  ret = ((!tree_int_cst_lt (fpos, bpos)
	  && tree_int_cst_lt (size_binop (MINUS_EXPR, fpos, bpos),
			      blen))
	 || (!tree_int_cst_lt (bpos, fpos)
	     && tree_int_cst_lt (size_binop (MINUS_EXPR, bpos, fpos),
				 flen)));

  if (!ret)
    return ret;

  if (data)
    {
      tree bend, fend;

      data->field_len = flen;
      data->field_pos = fpos;

      fend = size_binop (PLUS_EXPR, fpos, flen);
      bend = size_binop (PLUS_EXPR, bpos, blen);

      if (tree_int_cst_lt (bend, fend))
	data->overlap_len = size_binop (MINUS_EXPR, bend, fpos);
      else
	data->overlap_len = NULL;

      if (tree_int_cst_lt (fpos, bpos))
	{
	  data->overlap_pos = size_binop (MINUS_EXPR, bpos, fpos);
	  data->overlap_len = size_binop (MINUS_EXPR,
					  data->overlap_len
					  ? data->overlap_len
					  : flen,
					  data->overlap_pos);
	}
      else
	data->overlap_pos = NULL;
    }

  return ret;
}
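/* Worked example (for illustration only, not from the original
   sources): for a field FLD spanning bits [0, 8) of its parent and an
   incoming range with BLEN = 16, BPOS = 4, the function returns true
   and fills in field_len = 8, field_pos = 0, overlap_pos = 4 and
   overlap_len = 8 - 4 = 4, i.e. bits [4, 8) of the field overlap the
   incoming range.  */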
/* Add to LISTP a sequence of statements that copies BLEN bits between
   VAR and the scalarized elements of ELT, starting at bit VPOS of VAR
   and at bit BPOS of ELT.  The direction of the copy is given by
   TO_VAR.  */

static void
sra_explode_bitfield_assignment (tree var, tree vpos, bool to_var,
				 tree *listp, tree blen, tree bpos,
				 struct sra_elt *elt)
{
  struct sra_elt *fld;
  struct bitfield_overlap_info flp;

  FOR_EACH_ACTUAL_CHILD (fld, elt)
    {
      tree flen, fpos;

      if (!bitfield_overlaps_p (blen, bpos, fld, &flp))
	continue;

      flen = flp.overlap_len ? flp.overlap_len : flp.field_len;
      fpos = flp.overlap_pos ? flp.overlap_pos : bitsize_int (0);

      if (fld->replacement)
	{
	  tree infld, invar, st, type;

	  infld = fld->replacement;

	  type = TREE_TYPE (infld);
	  if (TYPE_PRECISION (type) != TREE_INT_CST_LOW (flen))
	    type = lang_hooks.types.type_for_size (TREE_INT_CST_LOW (flen), 1);
	  else
	    type = unsigned_type_for (type);

	  if (TREE_CODE (infld) == BIT_FIELD_REF)
	    {
	      fpos = size_binop (PLUS_EXPR, fpos, TREE_OPERAND (infld, 2));
	      infld = TREE_OPERAND (infld, 0);
	    }
	  else if (BYTES_BIG_ENDIAN && DECL_P (fld->element)
		   && !tree_int_cst_equal (TYPE_SIZE (TREE_TYPE (infld)),
					   DECL_SIZE (fld->element)))
	    {
	      fpos = size_binop (PLUS_EXPR, fpos,
				 TYPE_SIZE (TREE_TYPE (infld)));
	      fpos = size_binop (MINUS_EXPR, fpos,
				 DECL_SIZE (fld->element));
	    }

	  infld = fold_build3 (BIT_FIELD_REF, type, infld, flen, fpos);

	  invar = size_binop (MINUS_EXPR, flp.field_pos, bpos);
	  if (flp.overlap_pos)
	    invar = size_binop (PLUS_EXPR, invar, flp.overlap_pos);
	  invar = size_binop (PLUS_EXPR, invar, vpos);

	  invar = fold_build3 (BIT_FIELD_REF, type, var, flen, invar);

	  if (to_var)
	    st = sra_build_bf_assignment (invar, infld);
	  else
	    st = sra_build_bf_assignment (infld, invar);

	  append_to_statement_list (st, listp);
	}
      else
	{
	  tree sub = size_binop (MINUS_EXPR, flp.field_pos, bpos);
	  sub = size_binop (PLUS_EXPR, vpos, sub);
	  if (flp.overlap_pos)
	    sub = size_binop (PLUS_EXPR, sub, flp.overlap_pos);

	  sra_explode_bitfield_assignment (var, sub, to_var, listp,
					   flen, fpos, fld);
	}
    }
}
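/* For illustration only (hypothetical example): when a 16-bit
   bit-field access covers two instantiated 8-bit fields, the single
   access is exploded into two assignments, one per replacement
   variable, copying bits [0, 8) and [8, 16) of VAR to or from the
   two scalar replacements.  */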
/* Add to LISTBEFOREP statements that copy scalarized members of ELT
   that overlap with BIT_FIELD_REF<(ELT->element), BLEN, BPOS> back
   into the full variable, and to LISTAFTERP, if non-NULL, statements
   that copy the (presumably modified) overlapping portions of the
   full variable back to the scalarized variables.  */

static void
sra_sync_for_bitfield_assignment (tree *listbeforep, tree *listafterp,
				  tree blen, tree bpos,
				  struct sra_elt *elt)
{
  struct sra_elt *fld;
  struct bitfield_overlap_info flp;

  FOR_EACH_ACTUAL_CHILD (fld, elt)
    if (bitfield_overlaps_p (blen, bpos, fld, &flp))
      {
	if (fld->replacement || (!flp.overlap_len && !flp.overlap_pos))
	  {
	    generate_copy_inout (fld, false, generate_element_ref (fld),
				 listbeforep);
	    mark_no_warning (fld);
	    if (listafterp)
	      generate_copy_inout (fld, true, generate_element_ref (fld),
				   listafterp);
	  }
	else
	  {
	    tree flen = flp.overlap_len ? flp.overlap_len : flp.field_len;
	    tree fpos = flp.overlap_pos ? flp.overlap_pos : bitsize_int (0);

	    sra_sync_for_bitfield_assignment (listbeforep, listafterp,
					      flen, fpos, fld);
	  }
      }
}
/* Scalarize a USE.  To recap, this is either a simple reference to ELT,
   if ELT is scalar, or some occurrence of ELT that requires a complete
   aggregate.  IS_OUTPUT is true if ELT is being modified.  */

static void
scalarize_use (struct sra_elt *elt, tree *expr_p, block_stmt_iterator *bsi,
	       bool is_output, bool use_all)
{
  tree stmt = bsi_stmt (*bsi);
  tree bfexpr;

  if (elt->replacement)
    {
      tree replacement = elt->replacement;

      /* If we have a replacement, then updating the reference is as
	 simple as modifying the existing statement in place.  */
      if (is_output
	  && TREE_CODE (elt->replacement) == BIT_FIELD_REF
	  && is_gimple_reg (TREE_OPERAND (elt->replacement, 0))
	  && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
	  && &GIMPLE_STMT_OPERAND (stmt, 0) == expr_p)
	{
	  tree newstmt = sra_build_elt_assignment
	    (elt, GIMPLE_STMT_OPERAND (stmt, 1));
	  if (TREE_CODE (newstmt) != STATEMENT_LIST)
	    {
	      tree list = NULL;
	      append_to_statement_list (newstmt, &list);
	      newstmt = list;
	    }
	  sra_replace (bsi, newstmt);
	  return;
	}
      else if (!is_output
	       && TREE_CODE (elt->replacement) == BIT_FIELD_REF
	       && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
	       && &GIMPLE_STMT_OPERAND (stmt, 1) == expr_p)
	{
	  tree tmp = make_rename_temp
	    (TREE_TYPE (GIMPLE_STMT_OPERAND (stmt, 0)), "SR");
	  tree newstmt = sra_build_assignment (tmp, REPLDUP (elt->replacement));

	  if (TREE_CODE (newstmt) != STATEMENT_LIST)
	    {
	      tree list = NULL;
	      append_to_statement_list (newstmt, &list);
	      newstmt = list;
	    }
	  sra_insert_before (bsi, newstmt);
	  replacement = tmp;
	}
      if (is_output)
	mark_all_v_defs (stmt);
      *expr_p = REPLDUP (replacement);
      update_stmt (stmt);
    }
  else if (use_all && is_output
	   && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
	   && TREE_CODE (bfexpr
			 = GIMPLE_STMT_OPERAND (stmt, 0)) == BIT_FIELD_REF
	   && &TREE_OPERAND (bfexpr, 0) == expr_p
	   && INTEGRAL_TYPE_P (TREE_TYPE (bfexpr))
	   && TREE_CODE (TREE_TYPE (*expr_p)) == RECORD_TYPE)
    {
      tree listbefore = NULL, listafter = NULL;
      tree blen = fold_convert (bitsizetype, TREE_OPERAND (bfexpr, 1));
      tree bpos = fold_convert (bitsizetype, TREE_OPERAND (bfexpr, 2));
      bool update = false;

      if (!elt->use_block_copy)
	{
	  tree type = TREE_TYPE (bfexpr);
	  tree var = make_rename_temp (type, "SR"), tmp, st, vpos;

	  GIMPLE_STMT_OPERAND (stmt, 0) = var;
	  update = true;

	  if (!TYPE_UNSIGNED (type))
	    {
	      type = unsigned_type_for (type);
	      tmp = make_rename_temp (type, "SR");
	      st = build_gimple_modify_stmt (tmp,
					     fold_convert (type, var));
	      append_to_statement_list (st, &listafter);
	      var = tmp;
	    }

	  /* If VAR is wider than BLEN bits, it is padded at the
	     most-significant end.  We want to set VPOS such that
	     <BIT_FIELD_REF VAR BLEN VPOS> would refer to the
	     least-significant BLEN bits of VAR.  */
	  if (BYTES_BIG_ENDIAN)
	    vpos = size_binop (MINUS_EXPR, TYPE_SIZE (type), blen);
	  else
	    vpos = bitsize_int (0);
	  sra_explode_bitfield_assignment
	    (var, vpos, false, &listafter, blen, bpos, elt);
	}
      else
	sra_sync_for_bitfield_assignment
	  (&listbefore, &listafter, blen, bpos, elt);

      if (listbefore)
	{
	  mark_all_v_defs (listbefore);
	  sra_insert_before (bsi, listbefore);
	}
      if (listafter)
	{
	  mark_all_v_defs (listafter);
	  sra_insert_after (bsi, listafter);
	}

      if (update)
	update_stmt (stmt);
    }
  else if (use_all && !is_output
	   && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
	   && TREE_CODE (bfexpr
			 = GIMPLE_STMT_OPERAND (stmt, 1)) == BIT_FIELD_REF
	   && &TREE_OPERAND (GIMPLE_STMT_OPERAND (stmt, 1), 0) == expr_p
	   && INTEGRAL_TYPE_P (TREE_TYPE (bfexpr))
	   && TREE_CODE (TREE_TYPE (*expr_p)) == RECORD_TYPE)
    {
      tree list = NULL;
      tree blen = fold_convert (bitsizetype, TREE_OPERAND (bfexpr, 1));
      tree bpos = fold_convert (bitsizetype, TREE_OPERAND (bfexpr, 2));
      bool update = false;

      if (!elt->use_block_copy)
	{
	  tree type = TREE_TYPE (bfexpr);
	  tree var, vpos;

	  if (!TYPE_UNSIGNED (type))
	    type = unsigned_type_for (type);

	  var = make_rename_temp (type, "SR");

	  append_to_statement_list (build_gimple_modify_stmt
				    (var, build_int_cst_wide (type, 0, 0)),
				    &list);

	  /* If VAR is wider than BLEN bits, it is padded at the
	     most-significant end.  We want to set VPOS such that
	     <BIT_FIELD_REF VAR BLEN VPOS> would refer to the
	     least-significant BLEN bits of VAR.  */
	  if (BYTES_BIG_ENDIAN)
	    vpos = size_binop (MINUS_EXPR, TYPE_SIZE (type), blen);
	  else
	    vpos = bitsize_int (0);
	  sra_explode_bitfield_assignment
	    (var, vpos, true, &list, blen, bpos, elt);

	  GIMPLE_STMT_OPERAND (stmt, 1) = var;
	  update = true;
	}
      else
	sra_sync_for_bitfield_assignment
	  (&list, NULL, blen, bpos, elt);

      if (list)
	{
	  mark_all_v_defs (list);
	  sra_insert_before (bsi, list);
	}

      if (update)
	update_stmt (stmt);
    }
  else
    {
      tree list = NULL;

      /* Otherwise we need some copies.  If ELT is being read, then we
	 want to store all (modified) sub-elements back into the
	 structure before the reference takes place.  If ELT is being
	 written, then we want to load the changed values back into
	 our shadow variables.  */
      /* ??? We don't check modified for reads, we just always write all of
	 the values.  We should be able to record the SSA number of the VOP
	 for which the values were last read.  If that number matches the
	 SSA number of the VOP in the current statement, then we needn't
	 emit an assignment.  This would also eliminate double writes when
	 a structure is passed as more than one argument to a function call.
	 This optimization would be most effective if sra_walk_function
	 processed the blocks in dominator order.  */

      generate_copy_inout (elt, is_output, generate_element_ref (elt), &list);
      if (list == NULL)
	return;
      mark_all_v_defs (list);
      if (is_output)
	sra_insert_after (bsi, list);
      else
	{
	  sra_insert_before (bsi, list);
	  if (use_all)
	    mark_no_warning (elt);
	}
    }
}
/* Scalarize a COPY.  To recap, this is an assignment statement between
   two scalarizable references, LHS_ELT and RHS_ELT.  */

static void
scalarize_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
		block_stmt_iterator *bsi)
{
  tree list, stmt;

  if (lhs_elt->replacement && rhs_elt->replacement)
    {
      /* If we have two scalar operands, modify the existing statement.  */
      stmt = bsi_stmt (*bsi);

      /* See the commentary in sra_walk_function concerning
	 RETURN_EXPR, and why we should never see one here.  */
      gcc_assert (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT);

      GIMPLE_STMT_OPERAND (stmt, 0) = lhs_elt->replacement;
      GIMPLE_STMT_OPERAND (stmt, 1) = REPLDUP (rhs_elt->replacement);
      update_stmt (stmt);
    }
  else if (lhs_elt->use_block_copy || rhs_elt->use_block_copy)
    {
      /* If either side requires a block copy, then sync the RHS back
	 to the original structure, leave the original assignment
	 statement (which will perform the block copy), then load the
	 LHS values out of its now-updated original structure.  */
      /* ??? Could perform a modified pair-wise element copy.  That
	 would at least allow those elements that are instantiated in
	 both structures to be optimized well.  */

      list = NULL;
      generate_copy_inout (rhs_elt, false,
			   generate_element_ref (rhs_elt), &list);
      if (list)
	{
	  mark_all_v_defs (list);
	  sra_insert_before (bsi, list);
	}

      list = NULL;
      generate_copy_inout (lhs_elt, true,
			   generate_element_ref (lhs_elt), &list);
      if (list)
	{
	  mark_all_v_defs (list);
	  sra_insert_after (bsi, list);
	}
    }
  else
    {
      /* Otherwise both sides must be fully instantiated.  In which
	 case perform pair-wise element assignments and replace the
	 original block copy statement.  */

      stmt = bsi_stmt (*bsi);
      mark_all_v_defs (stmt);

      list = NULL;
      generate_element_copy (lhs_elt, rhs_elt, &list);
      gcc_assert (list);
      mark_all_v_defs (list);
      sra_replace (bsi, list);
    }
}
/* Scalarize an INIT.  To recap, this is an assignment to a scalarizable
   reference from some form of constructor: CONSTRUCTOR, COMPLEX_CST or
   COMPLEX_EXPR.  If RHS is NULL, it should be treated as an empty
   CONSTRUCTOR.  */

static void
scalarize_init (struct sra_elt *lhs_elt, tree rhs, block_stmt_iterator *bsi)
{
  bool result = true;
  tree list = NULL, init_list = NULL;

  /* Generate initialization statements for all members extant in the RHS.  */
  if (rhs)
    {
      /* Unshare the expression just in case this is from a decl's initial.  */
      rhs = unshare_expr (rhs);
      result = generate_element_init (lhs_elt, rhs, &init_list);
    }

  /* CONSTRUCTOR is defined such that any member not mentioned is assigned
     a zero value.  Initialize the rest of the instantiated elements.  */
  generate_element_zero (lhs_elt, &list);
  append_to_statement_list (init_list, &list);

  if (!result)
    {
      /* If we failed to convert the entire initializer, then we must
	 leave the structure assignment in place and must load values
	 from the structure into the slots for which we did not find
	 constants.  The easiest way to do this is to generate a complete
	 copy-out, and then follow that with the constant assignments
	 that we were able to build.  DCE will clean things up.  */
      tree list0 = NULL;
      generate_copy_inout (lhs_elt, true, generate_element_ref (lhs_elt),
			   &list0);
      append_to_statement_list (list, &list0);
      list = list0;
    }

  if (lhs_elt->use_block_copy || !result)
    {
      /* Since LHS is not fully instantiated, we must leave the structure
	 assignment in place.  Treating this case differently from a USE
	 exposes constants to later optimizations.  */
      if (list)
	{
	  mark_all_v_defs (list);
	  sra_insert_after (bsi, list);
	}
    }
  else
    {
      /* The LHS is fully instantiated.  The list of initializations
	 replaces the original structure assignment.  */
      gcc_assert (list);
      mark_all_v_defs (bsi_stmt (*bsi));
      mark_all_v_defs (list);
      sra_replace (bsi, list);
    }
}
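/* For illustration only (hypothetical example): with a fully
   instantiated LHS, an initialization such as

       x = (struct s){ .a = 1 };

   is replaced by "x$a = 1; x$b = 0;", the zeroing coming from
   generate_element_zero for members the CONSTRUCTOR does not
   mention.  */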
/* A subroutine of scalarize_ldst called via walk_tree.  Set
   TREE_THIS_NOTRAP on all INDIRECT_REFs.  */

static tree
mark_notrap (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      TREE_THIS_NOTRAP (t) = 1;
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;

  return NULL;
}
/* Scalarize a LDST.  To recap, this is an assignment between one scalarizable
   reference ELT and one non-scalarizable reference OTHER.  IS_OUTPUT is true
   if ELT is on the left-hand side.  */

static void
scalarize_ldst (struct sra_elt *elt, tree other,
		block_stmt_iterator *bsi, bool is_output)
{
  /* Shouldn't have gotten called for a scalar.  */
  gcc_assert (!elt->replacement);

  if (elt->use_block_copy)
    {
      /* Since ELT is not fully instantiated, we have to leave the
	 block copy in place.  Treat this as a USE.  */
      scalarize_use (elt, NULL, bsi, is_output, false);
    }
  else
    {
      /* The interesting case is when ELT is fully instantiated.  In this
	 case we can have each element stored/loaded directly to/from the
	 corresponding slot in OTHER.  This avoids a block copy.  */

      tree list = NULL, stmt = bsi_stmt (*bsi);

      mark_all_v_defs (stmt);
      generate_copy_inout (elt, is_output, other, &list);
      gcc_assert (list);
      mark_all_v_defs (list);

      /* Preserve EH semantics.  */
      if (stmt_ends_bb_p (stmt))
	{
	  tree_stmt_iterator tsi;
	  tree first, blist = NULL;
	  bool thr = tree_could_throw_p (stmt);

	  /* If the last statement of this BB created an EH edge
	     before scalarization, we have to locate the first
	     statement that can throw in the new statement list and
	     use that as the last statement of this BB, such that EH
	     semantics is preserved.  All statements up to this one
	     are added to the same BB.  All other statements in the
	     list will be added to normal outgoing edges of the same
	     BB.  If they access any memory, it's the same memory, so
	     we can assume they won't throw.  */
	  tsi = tsi_start (list);
	  for (first = tsi_stmt (tsi);
	       thr && !tsi_end_p (tsi) && !tree_could_throw_p (first);
	       first = tsi_stmt (tsi))
	    {
	      tsi_delink (&tsi);
	      append_to_statement_list (first, &blist);
	    }

	  /* Extract the first remaining statement from LIST, this is
	     the EH statement if there is one.  */
	  tsi_delink (&tsi);

	  if (blist)
	    sra_insert_before (bsi, blist);

	  /* Replace the old statement with this new representative.  */
	  bsi_replace (bsi, first, true);

	  if (!tsi_end_p (tsi))
	    {
	      /* If any reference would trap, then they all would.  And more
		 to the point, the first would.  Therefore none of the rest
		 will trap since the first didn't.  Indicate this by
		 iterating over the remaining statements and setting
		 TREE_THIS_NOTRAP in all INDIRECT_REFs.  */
	      do
		{
		  walk_tree (tsi_stmt_ptr (tsi), mark_notrap, NULL, NULL);
		  tsi_next (&tsi);
		}
	      while (!tsi_end_p (tsi));

	      insert_edge_copies (list, bsi->bb);
	    }
	}
      else
	sra_replace (bsi, list);
    }
}
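/* For illustration only (hypothetical example): for a fully
   instantiated ELT, a load such as "s = *p" is replaced by direct
   element loads, roughly

       s$a = p->a;
       s$b = p->b;

   avoiding the aggregate block copy; stores work symmetrically.  */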
/* Generate initializations for all scalarizable parameters.  */

static void
scalarize_parms (void)
{
  tree list = NULL;
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (needs_copy_in, 0, i, bi)
    {
      tree var = referenced_var (i);
      struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
      generate_copy_inout (elt, true, var, &list);
    }

  if (list)
    {
      insert_edge_copies (list, ENTRY_BLOCK_PTR);
      mark_all_v_defs (list);
    }
}

/* Entry point to phase 4.  Update the function to match replacements.  */

static void
scalarize_function (void)
{
  static const struct sra_walk_fns fns = {
    scalarize_use, scalarize_copy, scalarize_init, scalarize_ldst, false
  };

  sra_walk_function (&fns);
  scalarize_parms ();
  bsi_commit_edge_inserts ();
}
/* Debug helper function.  Print ELT in a nice human-readable format.  */

static void
dump_sra_elt_name (FILE *f, struct sra_elt *elt)
{
  if (elt->parent && TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
    {
      fputs (elt->element == integer_zero_node ? "__real__ " : "__imag__ ", f);
      dump_sra_elt_name (f, elt->parent);
    }
  else
    {
      if (elt->parent)
	dump_sra_elt_name (f, elt->parent);
      if (DECL_P (elt->element))
	{
	  if (TREE_CODE (elt->element) == FIELD_DECL)
	    fputc ('.', f);
	  print_generic_expr (f, elt->element, dump_flags);
	}
      else if (TREE_CODE (elt->element) == BIT_FIELD_REF)
	fprintf (f, "$B" HOST_WIDE_INT_PRINT_DEC "F" HOST_WIDE_INT_PRINT_DEC,
		 tree_low_cst (TREE_OPERAND (elt->element, 2), 1),
		 tree_low_cst (TREE_OPERAND (elt->element, 1), 1));
      else if (TREE_CODE (elt->element) == RANGE_EXPR)
	fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC ".." HOST_WIDE_INT_PRINT_DEC "]",
		 TREE_INT_CST_LOW (TREE_OPERAND (elt->element, 0)),
		 TREE_INT_CST_LOW (TREE_OPERAND (elt->element, 1)));
      else
	fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC "]",
		 TREE_INT_CST_LOW (elt->element));
    }
}

/* Likewise, but callable from the debugger.  */

void
debug_sra_elt_name (struct sra_elt *elt)
{
  dump_sra_elt_name (stderr, elt);
  fputc ('\n', stderr);
}
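/* For illustration only: with the format strings above, a bit-field
   block member covering 16 bits at bit 8 of a variable "foo" is
   dumped as "foo$B8F16", and an array element range as
   "foo[0..3]".  */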
static void
sra_init_cache (void)
{
  if (sra_type_decomp_cache)
    return;

  sra_type_decomp_cache = BITMAP_ALLOC (NULL);
  sra_type_inst_cache = BITMAP_ALLOC (NULL);
}
/* Main entry point.  */

static unsigned int
tree_sra (void)
{
  /* Initialize local variables.  */
  todoflags = 0;
  gcc_obstack_init (&sra_obstack);
  sra_candidates = BITMAP_ALLOC (NULL);
  needs_copy_in = BITMAP_ALLOC (NULL);
  sra_init_cache ();
  sra_map = htab_create (101, sra_elt_hash, sra_elt_eq, NULL);

  /* Scan.  If we find anything, instantiate and scalarize.  */
  if (find_candidates_for_sra ())
    {
      scan_function ();
      decide_instantiations ();
      scalarize_function ();
      if (!bitmap_empty_p (sra_candidates))
	todoflags |= TODO_rebuild_alias;
    }

  /* Free allocated memory.  */
  htab_delete (sra_map);
  sra_map = NULL;
  BITMAP_FREE (sra_candidates);
  BITMAP_FREE (needs_copy_in);
  BITMAP_FREE (sra_type_decomp_cache);
  BITMAP_FREE (sra_type_inst_cache);
  obstack_free (&sra_obstack, NULL);
  return todoflags;
}
static unsigned int
tree_sra_early (void)
{
  unsigned int ret;

  early_sra = true;
  ret = tree_sra ();
  early_sra = false;

  return ret & ~TODO_rebuild_alias;
}

static bool
gate_sra (void)
{
  return flag_tree_sra != 0;
}
struct gimple_opt_pass pass_sra_early =
{
 {
  GIMPLE_PASS,
  "esra",				/* name */
  gate_sra,				/* gate */
  tree_sra_early,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_SRA,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
  | TODO_update_ssa
  | TODO_ggc_collect
  | TODO_verify_ssa			/* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_sra =
{
 {
  GIMPLE_PASS,
  "sra",				/* name */
  gate_sra,				/* gate */
  tree_sra,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_SRA,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
  | TODO_update_ssa
  | TODO_ggc_collect
  | TODO_verify_ssa			/* todo_flags_finish */
 }
};