1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2 references into scalar references, exposing them to the scalar
3 optimizers.
4 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
5 Free Software Foundation, Inc.
6 Contributed by Diego Novillo <dnovillo@redhat.com>
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it
11 under the terms of the GNU General Public License as published by the
12 Free Software Foundation; either version 3, or (at your option) any
13 later version.
15 GCC is distributed in the hope that it will be useful, but WITHOUT
16 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
26 #include "coretypes.h"
31 /* These RTL headers are needed for basic-block.h. */
34 #include "hard-reg-set.h"
35 #include "basic-block.h"
36 #include "diagnostic.h"
37 #include "langhooks.h"
38 #include "tree-inline.h"
39 #include "tree-flow.h"
41 #include "tree-dump.h"
42 #include "tree-pass.h"
48 /* expr.h is needed for MOVE_RATIO. */
49 #include "expr.h"
53 /* The object of this pass is to replace a non-addressable aggregate with a
54 set of independent variables. Most of the time, all of these variables
55 will be scalars. But a secondary objective is to break up larger
56 aggregates into smaller aggregates. In the process we may find that some
57 bits of the larger aggregate can be deleted as unreferenced.
59 This substitution is done globally. More localized substitutions would
60 be the purview of a load-store motion pass.
62 The optimization proceeds in phases:
64 (1) Identify variables that have types that are candidates for
65 decomposition.
67 (2) Scan the function looking for the ways these variables are used.
68 In particular we're interested in the number of times a variable
69 (or member) is needed as a complete unit, and the number of times
70 a variable (or member) is copied.
72 (3) Based on the usage profile, instantiate substitution variables.
74 (4) Scan the function making replacements.
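/* Added for illustration; this example is not part of the original source.
   Given a candidate aggregate

       struct point { int x, y; } p;
       p.x = 1;
       p.y = 2;
       return p.x + p.y;

   phase 3 instantiates two scalar replacements p$x and p$y, and phase 4
   rewrites the function as

       p$x = 1;
       p$y = 2;
       return p$x + p$y;

   after which P itself is unreferenced and can be deleted. */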
78 /* True if this is the "early" pass, before inlining. */
79 static bool early_sra;
81 /* The set of aggregate variables that are candidates for scalarization. */
82 static bitmap sra_candidates;
84 /* Set of scalarizable PARM_DECLs that need copy-in operations at the
85 beginning of the function. */
86 static bitmap needs_copy_in;
88 /* Sets of bit pairs that cache type decomposition and instantiation. */
89 static bitmap sra_type_decomp_cache;
90 static bitmap sra_type_inst_cache;
92 /* One of these structures is created for each candidate aggregate and
93 each (accessed) member or group of members of such an aggregate. */
94 struct sra_elt
95 {
96 /* A tree of the elements. Used when we want to traverse everything. */
97 struct sra_elt *parent;
98 struct sra_elt *groups;
99 struct sra_elt *children;
100 struct sra_elt *sibling;
102 /* If this element is a root, then this is the VAR_DECL. If this is
103 a sub-element, this is some token used to identify the reference.
104 In the case of COMPONENT_REF, this is the FIELD_DECL. In the case
105 of an ARRAY_REF, this is the (constant) index. In the case of an
106 ARRAY_RANGE_REF, this is the (constant) RANGE_EXPR. In the case
107 of a complex number, this is a zero or one. */
108 tree element;
110 /* The type of the element. */
111 tree type;
113 /* A VAR_DECL, for any sub-element we've decided to replace. */
114 tree replacement;
116 /* The number of times the element is referenced as a whole. I.e.
117 given "a.b.c", this would be incremented for C, but not for A or B. */
118 unsigned int n_uses;
120 /* The number of times the element is copied to or from another
121 scalarizable element. */
122 unsigned int n_copies;
124 /* True if TYPE is scalar. */
125 bool is_scalar;
127 /* True if this element is a group of members of its parent. */
128 bool is_group;
130 /* True if we saw something about this element that prevents scalarization,
131 such as non-constant indexing. */
132 bool cannot_scalarize;
134 /* True if we've decided that structure-to-structure assignment
135 should happen via memcpy and not per-element. */
136 bool use_block_copy;
138 /* True if everything under this element has been marked TREE_NO_WARNING. */
139 bool all_no_warning;
141 /* A flag for use with/after random access traversals. */
142 bool visited;
144 /* True if there is a BIT_FIELD_REF on the LHS with a vector. */
145 bool is_vector_lhs;
147 /* 1 if the element is a field that is part of a block, 2 if the field
148 is the block itself, 0 if it's neither. */
149 char in_bitfld_block;
150 };
152 #define IS_ELEMENT_FOR_GROUP(ELEMENT) (TREE_CODE (ELEMENT) == RANGE_EXPR)
154 #define FOR_EACH_ACTUAL_CHILD(CHILD, ELT) \
155 for ((CHILD) = (ELT)->is_group \
156 ? next_child_for_group (NULL, (ELT)) \
157 : (ELT)->children; \
158 (CHILD); \
159 (CHILD) = (ELT)->is_group \
160 ? next_child_for_group ((CHILD), (ELT)) \
161 : (CHILD)->sibling)
163 /* Helper function for above macro. Return next child in group. */
164 static struct sra_elt *
165 next_child_for_group (struct sra_elt *child, struct sra_elt *group)
166 {
167 gcc_assert (group->is_group);
169 /* Find the next child in the parent. */
170 if (child)
171 child = child->sibling;
172 else
173 child = group->parent->children;
175 /* Skip siblings that do not belong to the group. */
176 while (child)
177 {
178 tree g_elt = group->element;
179 if (TREE_CODE (g_elt) == RANGE_EXPR)
180 {
181 if (!tree_int_cst_lt (child->element, TREE_OPERAND (g_elt, 0))
182 && !tree_int_cst_lt (TREE_OPERAND (g_elt, 1), child->element))
183 break;
184 }
185 else
186 gcc_unreachable ();
188 child = child->sibling;
189 }
191 return child;
192 }
194 /* Random access to the child of a parent is performed by hashing.
195 This prevents quadratic behavior, and allows SRA to function
196 reasonably on larger records. */
197 static htab_t sra_map;
199 /* All structures are allocated out of the following obstack. */
200 static struct obstack sra_obstack;
202 /* Debugging functions. */
203 static void dump_sra_elt_name (FILE *, struct sra_elt *);
204 extern void debug_sra_elt_name (struct sra_elt *);
206 /* Forward declarations. */
207 static tree generate_element_ref (struct sra_elt *);
208 static gimple_seq sra_build_assignment (tree dst, tree src);
209 static void mark_all_v_defs_seq (gimple_seq);
212 /* Return true if DECL is an SRA candidate. */
214 static bool
215 is_sra_candidate_decl (tree decl)
216 {
217 return DECL_P (decl) && bitmap_bit_p (sra_candidates, DECL_UID (decl));
218 }
220 /* Return true if TYPE is a scalar type. */
222 static bool
223 is_sra_scalar_type (tree type)
224 {
225 enum tree_code code = TREE_CODE (type);
226 return (code == INTEGER_TYPE || code == REAL_TYPE || code == VECTOR_TYPE
227 || code == FIXED_POINT_TYPE
228 || code == ENUMERAL_TYPE || code == BOOLEAN_TYPE
229 || code == POINTER_TYPE || code == OFFSET_TYPE
230 || code == REFERENCE_TYPE);
231 }
233 /* Return true if TYPE can be decomposed into a set of independent variables.
235 Note that this doesn't imply that all elements of TYPE can be
236 instantiated, just that if we decide to break up the type into
237 separate pieces that it can be done. */
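/* Added for illustration; these examples are not from the original source.
   A "struct { int i; float f; }" or a fixed-size array "int a[4]" can be
   decomposed; a union, a variable-sized or zero-sized type, or a record
   containing a bit-field whose size disagrees with its type's precision
   cannot. */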
239 static bool
240 sra_type_can_be_decomposed_p (tree type)
241 {
242 unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
243 tree t;
245 /* Avoid searching the same type twice. */
246 if (bitmap_bit_p (sra_type_decomp_cache, cache+0))
247 return true;
248 if (bitmap_bit_p (sra_type_decomp_cache, cache+1))
249 return false;
251 /* The type must have a definite nonzero size. */
252 if (TYPE_SIZE (type) == NULL || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
253 || integer_zerop (TYPE_SIZE (type)))
254 goto fail;
256 /* The type must be a non-union aggregate. */
257 switch (TREE_CODE (type))
258 {
259 case RECORD_TYPE:
260 {
261 bool saw_one_field = false;
263 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
264 if (TREE_CODE (t) == FIELD_DECL)
266 /* Reject incorrectly represented bit fields. */
267 if (DECL_BIT_FIELD (t)
268 && INTEGRAL_TYPE_P (TREE_TYPE (t))
269 && (tree_low_cst (DECL_SIZE (t), 1)
270 != TYPE_PRECISION (TREE_TYPE (t))))
271 goto fail;
273 saw_one_field = true;
276 /* Record types must have at least one field. */
277 if (!saw_one_field)
278 goto fail;
279 }
280 break;
282 case ARRAY_TYPE:
283 /* Array types must have a fixed lower and upper bound. */
284 t = TYPE_DOMAIN (type);
285 if (t == NULL)
286 goto fail;
287 if (TYPE_MIN_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MIN_VALUE (t)))
288 goto fail;
289 if (TYPE_MAX_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MAX_VALUE (t)))
290 goto fail;
291 break;
293 default:
294 goto fail;
295 }
300 bitmap_set_bit (sra_type_decomp_cache, cache+0);
301 return true;
303 fail:
304 bitmap_set_bit (sra_type_decomp_cache, cache+1);
305 return false;
306 }
308 /* Returns true if the TYPE is one of the available va_list types.
309 Otherwise it returns false.
310 Note that for multiple calling conventions there can be more
311 than just one va_list type present. */
313 static bool
314 is_va_list_type (tree type)
315 {
316 tree h;
318 if (type == NULL_TREE)
319 return false;
320 h = targetm.canonical_va_list_type (type);
321 if (h == NULL_TREE)
322 return false;
323 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (h))
324 return true;
326 return false;
327 }
328 /* Return true if DECL can be decomposed into a set of independent
329 (though not necessarily scalar) variables. */
331 static bool
332 decl_can_be_decomposed_p (tree var)
333 {
334 /* Early out for scalars. */
335 if (is_sra_scalar_type (TREE_TYPE (var)))
336 return true;
338 /* The variable must not be aliased. */
339 if (!is_gimple_non_addressable (var))
340 {
341 if (dump_file && (dump_flags & TDF_DETAILS))
342 {
343 fprintf (dump_file, "Cannot scalarize variable ");
344 print_generic_expr (dump_file, var, dump_flags);
345 fprintf (dump_file, " because it must live in memory\n");
346 }
347 return false;
348 }
350 /* The variable must not be volatile. */
351 if (TREE_THIS_VOLATILE (var))
352 {
353 if (dump_file && (dump_flags & TDF_DETAILS))
354 {
355 fprintf (dump_file, "Cannot scalarize variable ");
356 print_generic_expr (dump_file, var, dump_flags);
357 fprintf (dump_file, " because it is declared volatile\n");
358 }
359 return false;
360 }
362 /* We must be able to decompose the variable's type. */
363 if (!sra_type_can_be_decomposed_p (TREE_TYPE (var)))
364 {
365 if (dump_file && (dump_flags & TDF_DETAILS))
366 {
367 fprintf (dump_file, "Cannot scalarize variable ");
368 print_generic_expr (dump_file, var, dump_flags);
369 fprintf (dump_file, " because its type cannot be decomposed\n");
370 }
371 return false;
372 }
374 /* HACK: if we decompose a va_list_type_node before inlining, then we'll
375 confuse tree-stdarg.c, and we won't be able to figure out which and
376 how many arguments are accessed. This really should be improved in
377 tree-stdarg.c, as the decomposition is truly a win. This could also
378 be fixed if the stdarg pass ran early, but this can't be done until
379 we have aliasing information early too. See PR 30791. */
380 if (early_sra && is_va_list_type (TREE_TYPE (var)))
381 return false;
383 return true;
384 }
386 /* Return true if TYPE can be *completely* decomposed into scalars. */
388 static bool
389 type_can_instantiate_all_elements (tree type)
390 {
391 if (is_sra_scalar_type (type))
392 return true;
393 if (!sra_type_can_be_decomposed_p (type))
394 return false;
396 switch (TREE_CODE (type))
397 {
398 case RECORD_TYPE:
399 {
400 unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
401 tree f;
403 if (bitmap_bit_p (sra_type_inst_cache, cache+0))
404 return true;
405 if (bitmap_bit_p (sra_type_inst_cache, cache+1))
406 return false;
408 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
409 if (TREE_CODE (f) == FIELD_DECL)
410 {
411 if (!type_can_instantiate_all_elements (TREE_TYPE (f)))
412 {
413 bitmap_set_bit (sra_type_inst_cache, cache+1);
414 return false;
415 }
416 }
418 bitmap_set_bit (sra_type_inst_cache, cache+0);
419 return true;
420 }
422 case ARRAY_TYPE:
423 return type_can_instantiate_all_elements (TREE_TYPE (type));
433 /* Test whether ELT or some sub-element cannot be scalarized. */
435 static bool
436 can_completely_scalarize_p (struct sra_elt *elt)
437 {
438 struct sra_elt *c;
440 if (elt->cannot_scalarize)
441 return false;
443 for (c = elt->children; c; c = c->sibling)
444 if (!can_completely_scalarize_p (c))
445 return false;
447 for (c = elt->groups; c; c = c->sibling)
448 if (!can_completely_scalarize_p (c))
449 return false;
451 return true;
452 }
455 /* A simplified tree hashing algorithm that only handles the types of
456 trees we expect to find in sra_elt->element. */
458 static hashval_t
459 sra_hash_tree (tree t)
460 {
461 hashval_t h;
463 switch (TREE_CODE (t))
464 {
465 case VAR_DECL:
466 case PARM_DECL:
467 case RESULT_DECL:
468 h = DECL_UID (t);
469 break;
471 case INTEGER_CST:
472 h = TREE_INT_CST_LOW (t) ^ TREE_INT_CST_HIGH (t);
473 break;
475 case RANGE_EXPR:
476 h = iterative_hash_expr (TREE_OPERAND (t, 0), 0);
477 h = iterative_hash_expr (TREE_OPERAND (t, 1), h);
478 break;
480 case FIELD_DECL:
481 /* We can have types that are compatible, but have different member
482 lists, so we can't hash fields by ID. Use offsets instead. */
483 h = iterative_hash_expr (DECL_FIELD_OFFSET (t), 0);
484 h = iterative_hash_expr (DECL_FIELD_BIT_OFFSET (t), h);
485 break;
487 case BIT_FIELD_REF:
488 /* Don't take operand 0 into account, that's our parent. */
489 h = iterative_hash_expr (TREE_OPERAND (t, 1), 0);
490 h = iterative_hash_expr (TREE_OPERAND (t, 2), h);
491 break;
493 default:
494 gcc_unreachable ();
495 }
497 return h;
498 }
500 /* Hash function for type SRA_PAIR. */
502 static hashval_t
503 sra_elt_hash (const void *x)
504 {
505 const struct sra_elt *const e = (const struct sra_elt *) x;
506 const struct sra_elt *p;
507 hashval_t h;
509 h = sra_hash_tree (e->element);
511 /* Take into account everything except bitfield blocks back up the
512 chain. Given that chain lengths are rarely very long, this
513 should be acceptable. If we truly identify this as a performance
514 problem, it should work to hash the pointer value
515 "e->parent". */
516 for (p = e->parent; p ; p = p->parent)
517 if (!p->in_bitfld_block)
518 h = (h * 65521) ^ sra_hash_tree (p->element);
520 return h;
521 }
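/* Note added for exposition: 65521 is the largest prime below 2^16, a
   conventional multiplier for cheap multiplicative hash mixing. */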
523 /* Equality function for type SRA_PAIR. */
525 static int
526 sra_elt_eq (const void *x, const void *y)
527 {
528 const struct sra_elt *const a = (const struct sra_elt *) x;
529 const struct sra_elt *const b = (const struct sra_elt *) y;
530 tree ae, be;
531 const struct sra_elt *ap = a->parent;
532 const struct sra_elt *bp = b->parent;
534 if (ap)
535 while (ap->in_bitfld_block)
536 ap = ap->parent;
537 if (bp)
538 while (bp->in_bitfld_block)
539 bp = bp->parent;
541 if (ap != bp)
542 return false;
544 ae = a->element;
545 be = b->element;
547 if (ae == be)
548 return true;
549 if (TREE_CODE (ae) != TREE_CODE (be))
550 return false;
552 switch (TREE_CODE (ae))
553 {
554 case VAR_DECL:
555 case PARM_DECL:
556 case RESULT_DECL:
557 /* These are all pointer unique. */
558 return false;
560 case INTEGER_CST:
561 /* Integers are not pointer unique, so compare their values. */
562 return tree_int_cst_equal (ae, be);
564 case RANGE_EXPR:
565 return
566 tree_int_cst_equal (TREE_OPERAND (ae, 0), TREE_OPERAND (be, 0))
567 && tree_int_cst_equal (TREE_OPERAND (ae, 1), TREE_OPERAND (be, 1));
569 case FIELD_DECL:
570 /* Fields are unique within a record, but not between
571 compatible records. */
572 if (DECL_FIELD_CONTEXT (ae) == DECL_FIELD_CONTEXT (be))
573 return false;
574 return fields_compatible_p (ae, be);
576 case BIT_FIELD_REF:
577 return
578 tree_int_cst_equal (TREE_OPERAND (ae, 1), TREE_OPERAND (be, 1))
579 && tree_int_cst_equal (TREE_OPERAND (ae, 2), TREE_OPERAND (be, 2));
581 default:
582 gcc_unreachable ();
583 }
584 }
586 /* Create or return the SRA_ELT structure for CHILD in PARENT. PARENT
587 may be null, in which case CHILD must be a DECL. */
589 static struct sra_elt *
590 lookup_element (struct sra_elt *parent, tree child, tree type,
591 enum insert_option insert)
592 {
593 struct sra_elt dummy;
594 struct sra_elt **slot;
595 struct sra_elt *elt;
597 if (parent)
598 dummy.parent = parent->is_group ? parent->parent : parent;
599 else
600 dummy.parent = NULL;
601 dummy.element = child;
603 slot = (struct sra_elt **) htab_find_slot (sra_map, &dummy, insert);
604 if (!slot && insert == NO_INSERT)
605 return NULL;
607 elt = *slot;
608 if (!elt && insert == INSERT)
610 *slot = elt = XOBNEW (&sra_obstack, struct sra_elt);
611 memset (elt, 0, sizeof (*elt));
613 elt->parent = parent;
614 elt->element = child;
615 elt->type = type;
616 elt->is_scalar = is_sra_scalar_type (type);
618 if (parent)
619 {
620 if (IS_ELEMENT_FOR_GROUP (elt->element))
621 {
622 elt->is_group = true;
623 elt->sibling = parent->groups;
624 parent->groups = elt;
625 }
626 else
627 {
628 elt->sibling = parent->children;
629 parent->children = elt;
630 }
631 }
633 /* If this is a parameter, then if we want to scalarize, we have
634 one copy from the true function parameter. Count it now. */
635 if (TREE_CODE (child) == PARM_DECL)
636 {
637 elt->n_copies = 1;
638 bitmap_set_bit (needs_copy_in, DECL_UID (child));
639 }
640 }
642 return elt;
643 }
645 /* Create or return the SRA_ELT structure for EXPR if the expression
646 refers to a scalarizable variable. */
648 static struct sra_elt *
649 maybe_lookup_element_for_expr (tree expr)
650 {
651 struct sra_elt *elt;
652 tree child;
654 switch (TREE_CODE (expr))
655 {
656 case VAR_DECL:
657 case PARM_DECL:
658 case RESULT_DECL:
659 if (is_sra_candidate_decl (expr))
660 return lookup_element (NULL, expr, TREE_TYPE (expr), INSERT);
661 return NULL;
663 case ARRAY_REF:
664 /* We can't scalarize variable array indices. */
665 if (in_array_bounds_p (expr))
666 child = TREE_OPERAND (expr, 1);
667 else
668 return NULL;
669 break;
671 case ARRAY_RANGE_REF:
672 /* We can't scalarize variable array indices. */
673 if (range_in_array_bounds_p (expr))
674 {
675 tree domain = TYPE_DOMAIN (TREE_TYPE (expr));
676 child = build2 (RANGE_EXPR, integer_type_node,
677 TYPE_MIN_VALUE (domain), TYPE_MAX_VALUE (domain));
678 }
679 else
680 return NULL;
681 break;
683 case COMPONENT_REF:
684 {
685 tree type = TREE_TYPE (TREE_OPERAND (expr, 0));
686 /* Don't look through unions. */
687 if (TREE_CODE (type) != RECORD_TYPE)
688 return NULL;
689 /* Nor through variable-sized records. */
690 if (TYPE_SIZE (type) == NULL_TREE
691 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
692 return NULL;
693 child = TREE_OPERAND (expr, 1);
694 break;
695 }
697 case REALPART_EXPR:
698 child = integer_zero_node;
699 break;
700 case IMAGPART_EXPR:
701 child = integer_one_node;
702 break;
704 default:
705 return NULL;
706 }
708 elt = maybe_lookup_element_for_expr (TREE_OPERAND (expr, 0));
709 if (elt)
710 return lookup_element (elt, child, TREE_TYPE (expr), INSERT);
711 return NULL;
712 }
715 /* Functions to walk just enough of the tree to see all scalarizable
716 references, and categorize them. */
718 /* A set of callbacks for phases 2 and 4. They'll be invoked for the
719 various kinds of references seen. In all cases, *GSI is an iterator
720 pointing to the statement being processed. */
721 struct sra_walk_fns
722 {
723 /* Invoked when ELT is required as a unit. Note that ELT might refer to
724 a leaf node, in which case this is a simple scalar reference. *EXPR_P
725 points to the location of the expression. IS_OUTPUT is true if this
726 is a left-hand-side reference. USE_ALL is true if we saw something we
727 couldn't quite identify and had to force the use of the entire object. */
728 void (*use) (struct sra_elt *elt, tree *expr_p,
729 gimple_stmt_iterator *gsi, bool is_output, bool use_all);
731 /* Invoked when we have a copy between two scalarizable references. */
732 void (*copy) (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
733 gimple_stmt_iterator *gsi);
735 /* Invoked when ELT is initialized from a constant. VALUE may be NULL,
736 in which case it should be treated as an empty CONSTRUCTOR. */
737 void (*init) (struct sra_elt *elt, tree value, gimple_stmt_iterator *gsi);
739 /* Invoked when we have a copy between one scalarizable reference ELT
740 and one non-scalarizable reference OTHER without side-effects.
741 IS_OUTPUT is true if ELT is on the left-hand side. */
742 void (*ldst) (struct sra_elt *elt, tree other,
743 gimple_stmt_iterator *gsi, bool is_output);
745 /* True during phase 2, false during phase 4. */
746 /* ??? This is a hack. */
747 bool initial_scan;
748 };
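/* Added for illustration; not part of the original source. For a
   scalarizable aggregate A and a pointer P:
     A.x = B.x   invokes COPY when both sides are scalarizable elements;
     A = {...}   (a CONSTRUCTOR or constant RHS) invokes INIT;
     A = *P      invokes LDST with IS_OUTPUT true, the RHS being a
                 non-scalarizable lvalue;
     foo (A.x)   invokes USE on the element for A.x. */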
750 #ifdef ENABLE_CHECKING
751 /* Invoked via walk_tree, if *TP contains a candidate decl, return it. */
753 static tree
754 sra_find_candidate_decl (tree *tp, int *walk_subtrees,
755 void *data ATTRIBUTE_UNUSED)
756 {
757 tree t = *tp;
758 enum tree_code code = TREE_CODE (t);
760 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
761 {
762 *walk_subtrees = 0;
763 if (is_sra_candidate_decl (t))
764 return t;
765 }
767 return NULL;
768 }
769 #endif
773 /* Walk most expressions looking for a scalarizable aggregate.
774 If we find one, invoke FNS->USE. */
777 sra_walk_expr (tree *expr_p, gimple_stmt_iterator *gsi, bool is_output,
778 const struct sra_walk_fns *fns)
779 {
780 tree expr = *expr_p;
781 tree inner = expr;
782 bool disable_scalarization = false;
783 bool use_all_p = false;
785 /* We're looking to collect a reference expression between EXPR and INNER,
786 such that INNER is a scalarizable decl and all other nodes through EXPR
787 are references that we can scalarize. If we come across something that
788 we can't scalarize, we reset EXPR. This has the effect of making it
789 appear that we're referring to the larger expression as a whole. */
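/* Added for illustration; not part of the original source. Walking
   "a.b[2].c" finds decl A at the bottom with every intermediate
   reference scalarizable, so FNS->USE fires on the element for
   a.b[2].c. Walking "a.b[i].c" hits the variable index instead, so
   EXPR is reset and the reference is treated as a use of all of "a.b",
   whose element is also marked CANNOT_SCALARIZE. */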
791 while (1)
792 switch (TREE_CODE (inner))
793 {
794 case VAR_DECL:
795 case PARM_DECL:
796 case RESULT_DECL:
797 /* If there is a scalarizable decl at the bottom, then process it. */
798 if (is_sra_candidate_decl (inner))
799 {
800 struct sra_elt *elt = maybe_lookup_element_for_expr (expr);
801 if (disable_scalarization)
802 elt->cannot_scalarize = true;
803 else
804 fns->use (elt, expr_p, gsi, is_output, use_all_p);
805 }
806 return;
808 case ARRAY_REF:
809 /* Non-constant index means any member may be accessed. Prevent the
810 expression from being scalarized. If we were to treat this as a
811 reference to the whole array, we can wind up with a single dynamic
812 index reference inside a loop being overridden by several constant
813 index references during loop setup. It's possible that this could
814 be avoided by using dynamic usage counts based on BB trip counts
815 (based on loop analysis or profiling), but that hardly seems worth
816 the effort. */
817 /* ??? Hack. Figure out how to push this into the scan routines
818 without duplicating too much code. */
819 if (!in_array_bounds_p (inner))
820 {
821 disable_scalarization = true;
822 goto use_all;
823 }
824 /* ??? Are we assured that non-constant bounds and stride will have
825 the same value everywhere? I don't think Fortran will... */
826 if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
827 goto use_all;
828 inner = TREE_OPERAND (inner, 0);
829 break;
831 case ARRAY_RANGE_REF:
832 if (!range_in_array_bounds_p (inner))
833 {
834 disable_scalarization = true;
835 goto use_all;
836 }
837 /* ??? See above re non-constant bounds and stride. */
838 if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
839 goto use_all;
840 inner = TREE_OPERAND (inner, 0);
841 break;
843 case COMPONENT_REF:
844 {
845 tree type = TREE_TYPE (TREE_OPERAND (inner, 0));
846 /* Don't look through unions. */
847 if (TREE_CODE (type) != RECORD_TYPE)
848 goto use_all;
849 /* Nor through variable-sized records. */
850 if (TYPE_SIZE (type) == NULL_TREE
851 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
852 goto use_all;
853 inner = TREE_OPERAND (inner, 0);
854 break;
855 }
857 case REALPART_EXPR:
858 case IMAGPART_EXPR:
859 inner = TREE_OPERAND (inner, 0);
860 break;
862 case BIT_FIELD_REF:
863 /* A bit field reference into a vector is scalarized, but a reference
864 used as an output (on the left hand side) must be marked, so that
865 when we scalarize the vector we can mark its variable as not renamable. */
866 if (is_output
867 && TREE_CODE (TREE_TYPE (TREE_OPERAND (inner, 0))) == VECTOR_TYPE)
868 {
869 struct sra_elt *elt
870 = maybe_lookup_element_for_expr (TREE_OPERAND (inner, 0));
871 if (elt)
872 elt->is_vector_lhs = true;
873 }
875 /* A bit field reference (access to *multiple* fields simultaneously)
876 is not currently scalarized. Consider this an access to the full
877 outer element, to which walk_tree will bring us next. */
878 goto use_all;
880 CASE_CONVERT:
881 /* Similarly, a nop explicitly wants to look at an object in a
882 type other than the one we've scalarized. */
883 goto use_all;
885 case VIEW_CONVERT_EXPR:
886 /* Likewise for a view conversion, but with an additional twist:
887 it can be on the LHS and, in this case, an access to the full
888 outer element would mean a killing def. So we need to punt
889 if we don't already have a full access to the current element,
890 because we cannot pretend to have a killing def if we only
891 have a partial access at some level. */
892 if (is_output && !use_all_p && inner != expr)
893 disable_scalarization = true;
894 goto use_all;
897 /* This is a transparent wrapper. The entire inner expression really
898 is being used. */
899 goto use_all;
901 use_all:
902 expr_p = &TREE_OPERAND (inner, 0);
903 inner = expr = *expr_p;
904 use_all_p = true;
905 break;
907 default:
908 #ifdef ENABLE_CHECKING
909 /* Validate that we're not missing any references. */
910 gcc_assert (!walk_tree (&inner, sra_find_candidate_decl, NULL, NULL));
911 #endif
912 return;
913 }
914 }
916 /* Walk the arguments of a GIMPLE_CALL looking for scalarizable aggregates.
917 If we find one, invoke FNS->USE. */
920 sra_walk_gimple_call (gimple stmt, gimple_stmt_iterator *gsi,
921 const struct sra_walk_fns *fns)
924 int nargs = gimple_call_num_args (stmt);
926 for (i = 0; i < nargs; i++)
927 sra_walk_expr (gimple_call_arg_ptr (stmt, i), gsi, false, fns);
929 if (gimple_call_lhs (stmt))
930 sra_walk_expr (gimple_call_lhs_ptr (stmt), gsi, true, fns);
933 /* Walk the inputs and outputs of a GIMPLE_ASM looking for scalarizable
934 aggregates. If we find one, invoke FNS->USE. */
937 sra_walk_gimple_asm (gimple stmt, gimple_stmt_iterator *gsi,
938 const struct sra_walk_fns *fns)
941 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
942 sra_walk_expr (&TREE_VALUE (gimple_asm_input_op (stmt, i)), gsi, false, fns);
943 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
944 sra_walk_expr (&TREE_VALUE (gimple_asm_output_op (stmt, i)), gsi, true, fns);
947 /* Walk a GIMPLE_ASSIGN and categorize the assignment appropriately. */
950 sra_walk_gimple_assign (gimple stmt, gimple_stmt_iterator *gsi,
951 const struct sra_walk_fns *fns)
953 struct sra_elt *lhs_elt = NULL, *rhs_elt = NULL;
954 tree lhs, rhs;
956 /* If there is more than 1 element on the RHS, only walk the lhs. */
957 if (!gimple_assign_single_p (stmt))
958 {
959 sra_walk_expr (gimple_assign_lhs_ptr (stmt), gsi, true, fns);
960 return;
961 }
963 lhs = gimple_assign_lhs (stmt);
964 rhs = gimple_assign_rhs1 (stmt);
965 lhs_elt = maybe_lookup_element_for_expr (lhs);
966 rhs_elt = maybe_lookup_element_for_expr (rhs);
968 /* If both sides are scalarizable, this is a COPY operation. */
969 if (lhs_elt && rhs_elt)
970 {
971 fns->copy (lhs_elt, rhs_elt, gsi);
972 return;
973 }
975 /* If the RHS is scalarizable, handle it. There are only two cases. */
976 if (rhs_elt)
977 {
978 if (!rhs_elt->is_scalar && !TREE_SIDE_EFFECTS (lhs))
979 fns->ldst (rhs_elt, lhs, gsi, false);
980 else
981 fns->use (rhs_elt, gimple_assign_rhs1_ptr (stmt), gsi, false, false);
982 }
984 /* If it isn't scalarizable, there may be scalarizable variables within, so
985 check for a call or else walk the RHS to see if we need to do any
986 copy-in operations. We need to do it before the LHS is scalarized so
987 that the statements get inserted in the proper place, before any
988 copy-out operations. */
989 else
990 sra_walk_expr (gimple_assign_rhs1_ptr (stmt), gsi, false, fns);
992 /* Likewise, handle the LHS being scalarizable. We have cases similar
993 to those above, but also want to handle RHS being constant. */
994 if (lhs_elt)
995 {
996 /* If this is an assignment from a constant, or constructor, then
997 we have access to all of the elements individually. Invoke INIT. */
998 if (TREE_CODE (rhs) == COMPLEX_EXPR
999 || TREE_CODE (rhs) == COMPLEX_CST
1000 || TREE_CODE (rhs) == CONSTRUCTOR)
1001 fns->init (lhs_elt, rhs, gsi);
1003 /* If this is an assignment from read-only memory, treat this as if
1004 we'd been passed the constructor directly. Invoke INIT. */
1005 else if (TREE_CODE (rhs) == VAR_DECL
1006 && TREE_STATIC (rhs)
1007 && !DECL_EXTERNAL (rhs)
1008 && TREE_READONLY (rhs)
1009 && targetm.binds_local_p (rhs))
1010 fns->init (lhs_elt, DECL_INITIAL (rhs), gsi);
1012 /* If this is a copy from a non-scalarizable lvalue, invoke LDST.
1013 The lvalue requirement prevents us from trying to directly scalarize
1014 the result of a function call, which would result in calling the
1015 function multiple times, and other evil things. */
1016 else if (!lhs_elt->is_scalar
1017 && !TREE_SIDE_EFFECTS (rhs) && is_gimple_addressable (rhs))
1018 fns->ldst (lhs_elt, rhs, gsi, true);
1020 /* Otherwise we're being used in some context that requires the
1021 aggregate to be seen as a whole. Invoke USE. */
1022 else
1023 fns->use (lhs_elt, gimple_assign_lhs_ptr (stmt), gsi, true, false);
1024 }
1026 /* As above, LHS_ELT being null means only that the LHS as a
1027 whole is not a scalarizable reference. There may be occurrences of
1028 scalarizable variables within, which implies a USE. */
1029 else
1030 sra_walk_expr (gimple_assign_lhs_ptr (stmt), gsi, true, fns);
1031 }
1033 /* Entry point to the walk functions. Search the entire function,
1034 invoking the callbacks in FNS on each of the references to
1035 scalarizable variables. */
1038 sra_walk_function (const struct sra_walk_fns *fns)
1039 {
1040 basic_block bb;
1041 gimple_stmt_iterator si, ni;
1043 /* ??? Phase 4 could derive some benefit from walking the function in
1044 dominator tree order. */
1046 FOR_EACH_BB (bb)
1047 for (si = gsi_start_bb (bb); !gsi_end_p (si); si = ni)
1048 {
1049 gimple stmt;
1051 stmt = gsi_stmt (si);
1052 ni = si;
1053 gsi_next (&ni);
1056 /* If the statement does not reference memory, then it doesn't
1057 make any structure references that we care about. */
1058 if (!gimple_references_memory_p (stmt))
1059 continue;
1061 switch (gimple_code (stmt))
1062 {
1063 case GIMPLE_RETURN:
1064 /* If we have "return <retval>" then the return value is
1065 already exposed for our pleasure. Walk it as a USE to
1066 force all the components back in place for the return. */
1068 if (gimple_return_retval (stmt) == NULL_TREE)
1069 ;
1070 else
1071 sra_walk_expr (gimple_return_retval_ptr (stmt), &si, false,
1072 fns);
1073 break;
1075 case GIMPLE_ASSIGN:
1076 sra_walk_gimple_assign (stmt, &si, fns);
1077 break;
1078 case GIMPLE_CALL:
1079 sra_walk_gimple_call (stmt, &si, fns);
1080 break;
1081 case GIMPLE_ASM:
1082 sra_walk_gimple_asm (stmt, &si, fns);
1083 break;
1085 default:
1086 break;
1087 }
1088 }
1089 }
1091 /* Phase One: Scan all referenced variables in the program looking for
1092 structures that could be decomposed. */
1094 static bool
1095 find_candidates_for_sra (void)
1096 {
1097 bool any_set = false;
1098 tree var;
1099 referenced_var_iterator rvi;
1101 FOR_EACH_REFERENCED_VAR (var, rvi)
1102 {
1103 if (decl_can_be_decomposed_p (var))
1104 {
1105 bitmap_set_bit (sra_candidates, DECL_UID (var));
1106 any_set = true;
1107 }
1108 }
1110 return any_set;
1111 }
1114 /* Phase Two: Scan all references to scalarizable variables. Count the
1115 number of times they are used or copied respectively. */
1117 /* Callbacks to fill in SRA_WALK_FNS. Everything but USE is
1118 considered a copy, because we can decompose the reference such that
1119 the sub-elements needn't be contiguous. */
1122 scan_use (struct sra_elt *elt, tree *expr_p ATTRIBUTE_UNUSED,
1123 gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED,
1124 bool is_output ATTRIBUTE_UNUSED, bool use_all ATTRIBUTE_UNUSED)
1125 {
1126 elt->n_uses += 1;
1127 }
1130 scan_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
1131 gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED)
1133 lhs_elt->n_copies += 1;
1134 rhs_elt->n_copies += 1;
1138 scan_init (struct sra_elt *lhs_elt, tree rhs ATTRIBUTE_UNUSED,
1139 gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED)
1141 lhs_elt->n_copies += 1;
1145 scan_ldst (struct sra_elt *elt, tree other ATTRIBUTE_UNUSED,
1146 gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED,
1147 bool is_output ATTRIBUTE_UNUSED)
1148 {
1149 elt->n_copies += 1;
1150 }
1152 /* Dump the values we collected during the scanning phase. */
1154 static void
1155 scan_dump (struct sra_elt *elt)
1156 {
1157 struct sra_elt *c;
1159 dump_sra_elt_name (dump_file, elt);
1160 fprintf (dump_file, ": n_uses=%u n_copies=%u\n", elt->n_uses, elt->n_copies);
1162 for (c = elt->children; c ; c = c->sibling)
1163 scan_dump (c);
1165 for (c = elt->groups; c ; c = c->sibling)
1166 scan_dump (c);
1167 }
1169 /* Entry point to phase 2. Scan the entire function, building up
1170 scalarization data structures, recording copies and uses. */
1173 scan_function (void)
1175 static const struct sra_walk_fns fns = {
1176 scan_use, scan_copy, scan_init, scan_ldst, true
1177 };
1180 sra_walk_function (&fns);
1182 if (dump_file && (dump_flags & TDF_DETAILS))
1186 fputs ("\nScan results:\n", dump_file);
1187 EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
1188 {
1189 tree var = referenced_var (i);
1190 struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
1191 if (elt)
1192 scan_dump (elt);
1193 }
1194 fputc ('\n', dump_file);
1195 }
1196 }
1198 /* Phase Three: Make decisions about which variables to scalarize, if any.
1199 All elements to be scalarized have replacement variables made for them. */
1201 /* A subroutine of build_element_name. Recursively build the element
1202 name on the obstack. */
1204 static void
1205 build_element_name_1 (struct sra_elt *elt)
1206 {
1207 tree t;
1208 char buffer[32];
1210 if (elt->parent)
1211 {
1212 build_element_name_1 (elt->parent);
1213 obstack_1grow (&sra_obstack, '$');
1215 if (TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
1216 {
1217 if (elt->element == integer_zero_node)
1218 obstack_grow (&sra_obstack, "real", 4);
1219 else
1220 obstack_grow (&sra_obstack, "imag", 4);
1221 return;
1222 }
1223 }
1225 t = elt->element;
1226 if (TREE_CODE (t) == INTEGER_CST)
1228 /* ??? Eh. Don't bother doing double-wide printing. */
1229 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (t));
1230 obstack_grow (&sra_obstack, buffer, strlen (buffer));
1232 else if (TREE_CODE (t) == BIT_FIELD_REF)
1234 sprintf (buffer, "B" HOST_WIDE_INT_PRINT_DEC,
1235 tree_low_cst (TREE_OPERAND (t, 2), 1));
1236 obstack_grow (&sra_obstack, buffer, strlen (buffer));
1237 sprintf (buffer, "F" HOST_WIDE_INT_PRINT_DEC,
1238 tree_low_cst (TREE_OPERAND (t, 1), 1));
1239 obstack_grow (&sra_obstack, buffer, strlen (buffer));
1240 }
1241 else
1242 {
1243 tree name = DECL_NAME (t);
1244 if (name)
1245 obstack_grow (&sra_obstack, IDENTIFIER_POINTER (name),
1246 IDENTIFIER_LENGTH (name));
1247 else
1248 {
1249 sprintf (buffer, "D%u", DECL_UID (t));
1250 obstack_grow (&sra_obstack, buffer, strlen (buffer));
1251 }
1252 }
1253 }
1255 /* Construct a pretty variable name for an element's replacement variable.
1256 The name is built on the obstack. */
1259 build_element_name (struct sra_elt *elt)
1261 build_element_name_1 (elt);
1262 obstack_1grow (&sra_obstack, '\0');
1263 return XOBFINISH (&sra_obstack, char *);
1264 }
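/* Added for illustration; not part of the original source. For
   "struct { int len; _Complex float z; } rec", the replacement for the
   imaginary part of rec.z is named "rec$z$imag"; an unnamed decl with
   DECL_UID 1234 would contribute "D1234" instead. */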
1266 /* Instantiate an element as an independent variable. */
1268 static void
1269 instantiate_element (struct sra_elt *elt)
1270 {
1271 struct sra_elt *base_elt;
1272 tree var, base;
1273 bool nowarn = TREE_NO_WARNING (elt->element);
1275 for (base_elt = elt; base_elt->parent; base_elt = base_elt->parent)
1276 if (!nowarn)
1277 nowarn = TREE_NO_WARNING (base_elt->parent->element);
1278 base = base_elt->element;
1280 elt->replacement = var = make_rename_temp (elt->type, "SR");
1282 if (DECL_P (elt->element)
1283 && !tree_int_cst_equal (DECL_SIZE (var), DECL_SIZE (elt->element)))
1284 {
1285 DECL_SIZE (var) = DECL_SIZE (elt->element);
1286 DECL_SIZE_UNIT (var) = DECL_SIZE_UNIT (elt->element);
1288 elt->in_bitfld_block = 1;
1289 elt->replacement = fold_build3 (BIT_FIELD_REF, elt->type, var,
1290 DECL_SIZE (var),
1291 BYTES_BIG_ENDIAN
1292 ? size_binop (MINUS_EXPR,
1293 TYPE_SIZE (elt->type),
1294 DECL_SIZE (var))
1295 : bitsize_int (0));
1296 }
1298 /* A vector that is used on the left hand side with a BIT_FIELD_REF
1299 is not a gimple register. */
1300 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE && elt->is_vector_lhs)
1301 DECL_GIMPLE_REG_P (var) = 0;
1303 DECL_SOURCE_LOCATION (var) = DECL_SOURCE_LOCATION (base);
1304 DECL_ARTIFICIAL (var) = 1;
1306 if (TREE_THIS_VOLATILE (elt->type))
1308 TREE_THIS_VOLATILE (var) = 1;
1309 TREE_SIDE_EFFECTS (var) = 1;
1312 if (DECL_NAME (base) && !DECL_IGNORED_P (base))
1314 char *pretty_name = build_element_name (elt);
1315 DECL_NAME (var) = get_identifier (pretty_name);
1316 obstack_free (&sra_obstack, pretty_name);
1318 SET_DECL_DEBUG_EXPR (var, generate_element_ref (elt));
1319 DECL_DEBUG_EXPR_IS_FROM (var) = 1;
1321 DECL_IGNORED_P (var) = 0;
1322 TREE_NO_WARNING (var) = nowarn;
1323 }
1324 else
1325 {
1326 DECL_IGNORED_P (var) = 1;
1327 /* ??? We can't generate any warning that would be meaningful. */
1328 TREE_NO_WARNING (var) = 1;
1331 /* Zero-initialize bit-field scalarization variables, to avoid
1332 triggering undefined behavior. */
1333 if (TREE_CODE (elt->element) == BIT_FIELD_REF
1334 || (var != elt->replacement
1335 && TREE_CODE (elt->replacement) == BIT_FIELD_REF))
1336 {
1337 gimple_seq init = sra_build_assignment (var,
1338 fold_convert (TREE_TYPE (var),
1339 integer_zero_node));
1341 insert_edge_copies_seq (init, ENTRY_BLOCK_PTR);
1342 mark_all_v_defs_seq (init);
1343 }
1347 fputs (" ", dump_file);
1348 dump_sra_elt_name (dump_file, elt);
1349 fputs (" -> ", dump_file);
1350 print_generic_expr (dump_file, var, dump_flags);
1351 fputc ('\n', dump_file);
1352 }
1353 }
1355 /* Make one pass across an element tree deciding whether or not it's
1356 profitable to instantiate individual leaf scalars.
1358 PARENT_USES and PARENT_COPIES are the sum of the N_USES and N_COPIES
1359 fields all the way up the tree. */
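/* Added for illustration (numbers invented): if "a" is read as a whole
   twice (PARENT_USES == 2) while "a.b" is used three times and copied
   once, then 3 + 1 + 0 > 2 and a.b gets its own replacement variable;
   had "a" been used whole five times, a.b would remain part of the
   aggregate. */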
1361 static void
1362 decide_instantiation_1 (struct sra_elt *elt, unsigned int parent_uses,
1363 unsigned int parent_copies)
1364 {
1365 if (dump_file && !elt->parent)
1366 {
1367 fputs ("Initial instantiation for ", dump_file);
1368 dump_sra_elt_name (dump_file, elt);
1369 fputc ('\n', dump_file);
1370 }
1372 if (elt->cannot_scalarize)
1373 return;
1375 if (elt->is_scalar)
1376 {
1377 /* The decision is simple: instantiate if we're used more frequently
1378 than the parent needs to be seen as a complete unit. */
1379 if (elt->n_uses + elt->n_copies + parent_copies > parent_uses)
1380 instantiate_element (elt);
1381 }
1382 else
1383 {
1384 struct sra_elt *c, *group;
1385 unsigned int this_uses = elt->n_uses + parent_uses;
1386 unsigned int this_copies = elt->n_copies + parent_copies;
1388 /* Consider groups of sub-elements as weighing in favour of
1389 instantiation whatever their size. */
1390 for (group = elt->groups; group ; group = group->sibling)
1391 FOR_EACH_ACTUAL_CHILD (c, group)
1392 {
1393 c->n_uses += group->n_uses;
1394 c->n_copies += group->n_copies;
1395 }
1397 for (c = elt->children; c ; c = c->sibling)
1398 decide_instantiation_1 (c, this_uses, this_copies);
1399 }
1400 }
1402 /* Compute the size and number of all instantiated elements below ELT.
1403 We will only care about this if the size of the complete structure
1404 fits in a HOST_WIDE_INT, so we don't have to worry about overflow. */
1406 static unsigned int
1407 sum_instantiated_sizes (struct sra_elt *elt, unsigned HOST_WIDE_INT *sizep)
1408 {
1409 if (elt->replacement)
1410 {
1411 *sizep += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (elt->type));
1412 return 1;
1413 }
1414 else
1415 {
1416 struct sra_elt *c;
1417 unsigned int count = 0;
1419 for (c = elt->children; c ; c = c->sibling)
1420 count += sum_instantiated_sizes (c, sizep);
1422 return count;
1423 }
1424 }
1426 /* Instantiate fields in ELT->TYPE that are not currently present as
1427 children of ELT. */
1429 static void instantiate_missing_elements (struct sra_elt *elt);
1431 static struct sra_elt *
1432 instantiate_missing_elements_1 (struct sra_elt *elt, tree child, tree type)
1434 struct sra_elt *sub = lookup_element (elt, child, type, INSERT);
1435 if (sub->is_scalar)
1436 {
1437 if (sub->replacement == NULL)
1438 instantiate_element (sub);
1439 }
1440 else
1441 instantiate_missing_elements (sub);
1442 return sub;
1443 }
1445 /* Obtain the canonical type for field F of ELEMENT. */
1448 canon_type_for_field (tree f, tree element)
1450 tree field_type = TREE_TYPE (f);
1452 /* canonicalize_component_ref() unwidens some bit-field types (not
1453 marked as DECL_BIT_FIELD in C++), so we must do the same, lest
1454 we introduce type mismatches. */
1455 if (INTEGRAL_TYPE_P (field_type)
1456 && DECL_MODE (f) != TYPE_MODE (field_type))
1457 field_type = TREE_TYPE (get_unwidened (build3 (COMPONENT_REF,
1458 field_type,
1459 element,
1460 f, NULL_TREE),
1461 NULL_TREE));
1463 return field_type;
1464 }
1466 /* Look for adjacent fields of ELT starting at F that we'd like to
1467 scalarize as a single variable. Return the last field of the
1468 group. */
1471 try_instantiate_multiple_fields (struct sra_elt *elt, tree f)
1474 unsigned HOST_WIDE_INT align, bit, size, alchk;
1475 enum machine_mode mode;
1476 tree first = f, prev;
1477 tree type, var;
1478 struct sra_elt *block;
1480 /* Pointer fields are typically best handled as standalone entities. */
1481 if (POINTER_TYPE_P (TREE_TYPE (f)))
1484 if (!is_sra_scalar_type (TREE_TYPE (f))
1485 || !host_integerp (DECL_FIELD_OFFSET (f), 1)
1486 || !host_integerp (DECL_FIELD_BIT_OFFSET (f), 1)
1487 || !host_integerp (DECL_SIZE (f), 1)
1488 || lookup_element (elt, f, NULL, NO_INSERT))
1489 return f;
1491 block = elt;
1493 /* For complex and array objects, there are going to be integer
1494 literals as child elements. In this case, we can't just take the
1495 alignment and mode of the decl, so we instead rely on the element
1496 type.
1498 ??? We could try to infer additional alignment from the full
1499 object declaration and the location of the sub-elements we're
1500 accessing. */
1501 for (count = 0; !DECL_P (block->element); count++)
1502 block = block->parent;
1504 align = DECL_ALIGN (block->element);
1505 alchk = GET_MODE_BITSIZE (DECL_MODE (block->element));
1507 if (count)
1508 {
1509 type = TREE_TYPE (block->element);
1510 while (TREE_CODE (type) == ARRAY_TYPE)
1511 type = TREE_TYPE (type);
1513 align = TYPE_ALIGN (type);
1514 alchk = GET_MODE_BITSIZE (TYPE_MODE (type));
1515 }
1520 /* Coalescing wider fields is probably pointless and
1521 inefficient. */
1522 if (align > BITS_PER_WORD)
1523 align = BITS_PER_WORD;
1525 bit = tree_low_cst (DECL_FIELD_OFFSET (f), 1) * BITS_PER_UNIT
1526 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f), 1);
1527 size = tree_low_cst (DECL_SIZE (f), 1);
1529 alchk = align - 1;
1530 alchk = ~alchk;
1532 if ((bit & alchk) != ((bit + size - 1) & alchk))
1533 return f;
1535 /* Find adjacent fields in the same alignment word. */
1537 for (prev = f, f = TREE_CHAIN (f);
1538 f && TREE_CODE (f) == FIELD_DECL
1539 && is_sra_scalar_type (TREE_TYPE (f))
1540 && host_integerp (DECL_FIELD_OFFSET (f), 1)
1541 && host_integerp (DECL_FIELD_BIT_OFFSET (f), 1)
1542 && host_integerp (DECL_SIZE (f), 1)
1543 && !lookup_element (elt, f, NULL, NO_INSERT);
1544 prev = f, f = TREE_CHAIN (f))
1546 unsigned HOST_WIDE_INT nbit, nsize;
1548 nbit = tree_low_cst (DECL_FIELD_OFFSET (f), 1) * BITS_PER_UNIT
1549 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f), 1);
1550 nsize = tree_low_cst (DECL_SIZE (f), 1);
1552 if (bit + size == nbit)
1554 if ((bit & alchk) != ((nbit + nsize - 1) & alchk))
1556 /* If we're at an alignment boundary, don't bother
1557 growing alignment such that we can include this next
1558 field. */
1559 if ((nbit & alchk)
1560 || GET_MODE_BITSIZE (DECL_MODE (f)) <= align)
1561 break;
1563 align = GET_MODE_BITSIZE (DECL_MODE (f));
1567 if ((bit & alchk) != ((nbit + nsize - 1) & alchk))
1572 else if (nbit + nsize == bit)
1574 if ((nbit & alchk) != ((bit + size - 1) & alchk))
1577 || GET_MODE_BITSIZE (DECL_MODE (f)) <= align)
1580 align = GET_MODE_BITSIZE (DECL_MODE (f));
1584 if ((nbit & alchk) != ((bit + size - 1) & alchk))
1599 gcc_assert ((bit & alchk) == ((bit + size - 1) & alchk));
1601 /* Try to widen the bit range so as to cover padding bits as well. */
1603 if ((bit & ~alchk) || size != align)
1605 unsigned HOST_WIDE_INT mbit = bit & alchk;
1606 unsigned HOST_WIDE_INT msize = align;
1608 for (f = TYPE_FIELDS (elt->type);
1609 f; f = TREE_CHAIN (f))
1611 unsigned HOST_WIDE_INT fbit, fsize;
1613 /* Skip the fields from first to prev. */
1620 if (!(TREE_CODE (f) == FIELD_DECL
1621 && host_integerp (DECL_FIELD_OFFSET (f), 1)
1622 && host_integerp (DECL_FIELD_BIT_OFFSET (f), 1)))
1625 fbit = tree_low_cst (DECL_FIELD_OFFSET (f), 1) * BITS_PER_UNIT
1626 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f), 1);
1628 /* If we're past the selected word, we're fine. */
1629 if ((bit & alchk) < (fbit & alchk))
1630 break;
1632 if (host_integerp (DECL_SIZE (f), 1))
1633 fsize = tree_low_cst (DECL_SIZE (f), 1);
1634 else
1635 /* Assume a variable-sized field takes up all space till
1636 the end of the word. ??? Endianness issues? */
1637 fsize = align - (fbit & alchk);
1639 if ((fbit & alchk) < (bit & alchk))
1641 /* A large field might start at a previous word and
1642 extend into the selected word. Exclude those
1643 bits. ??? Endianness issues? */
1644 HOST_WIDE_INT diff = fbit + fsize - mbit;
1654 /* Non-overlapping, great. */
1655 if (fbit + fsize <= mbit
1656 || mbit + msize <= fbit)
1661 unsigned HOST_WIDE_INT diff = fbit + fsize - mbit;
1665 else if (fbit > mbit)
1666 msize -= (mbit + msize - fbit);
1676 /* Now we know the bit range we're interested in. Find the smallest
1677 machine mode we can use to access it. */
1679 for (mode = smallest_mode_for_size (size, MODE_INT);
1680 ;
1681 mode = GET_MODE_WIDER_MODE (mode))
1682 {
1683 gcc_assert (mode != VOIDmode);
1685 alchk = GET_MODE_PRECISION (mode) - 1;
1686 alchk = ~alchk;
1688 if ((bit & alchk) == ((bit + size - 1) & alchk))
1689 break;
1690 }
1692 gcc_assert (~alchk < align);
1694 /* Create the field group as a single variable. */
1696 /* We used to create a type for the mode above, but SIZE turns
1697 out not to be of mode-size. As we need a matching type
1698 to build a BIT_FIELD_REF, use a nonstandard integer type as
1699 fallback. */
1700 type = lang_hooks.types.type_for_size (size, 1);
1701 if (!type || TYPE_PRECISION (type) != size)
1702 type = build_nonstandard_integer_type (size, 1);
1704 var = build3 (BIT_FIELD_REF, type, NULL_TREE,
1705 bitsize_int (size), bitsize_int (bit));
1707 block = instantiate_missing_elements_1 (elt, var, type);
1708 gcc_assert (block && block->is_scalar);
1710 var = block->replacement;
1711 block->in_bitfld_block = 2;
1713 /* Add the member fields to the group, such that they access
1714 portions of the group variable. */
1716 for (f = first; f != TREE_CHAIN (prev); f = TREE_CHAIN (f))
1718 tree field_type = canon_type_for_field (f, elt->element);
1719 struct sra_elt *fld = lookup_element (block, f, field_type, INSERT);
1721 gcc_assert (fld && fld->is_scalar && !fld->replacement);
1723 fld->replacement = fold_build3 (BIT_FIELD_REF, field_type, var,
1724 bitsize_int (TYPE_PRECISION (field_type)),
1725 bitsize_int
1726 ((TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f))
1727 * BITS_PER_UNIT
1728 + (TREE_INT_CST_LOW
1729 (DECL_FIELD_BIT_OFFSET (f)))
1730 - (TREE_INT_CST_LOW
1731 (TREE_OPERAND (block->element, 2))))
1732 & ~alchk));
1733 fld->in_bitfld_block = 1;
1734 }
1736 return prev;
1737 }
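/* Added for illustration; not part of the original source. Given
   "struct { unsigned a:3, b:5, c:8; } x", the three adjacent bit-fields
   fit in one 16-bit word, so the group becomes one replacement variable
   accessed as BIT_FIELD_REF <x, 16, 0>, and the members are rewritten
   as BIT_FIELD_REF <SR, 3, 0>, BIT_FIELD_REF <SR, 5, 3> and
   BIT_FIELD_REF <SR, 8, 8> respectively. */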
1739 static void
1740 instantiate_missing_elements (struct sra_elt *elt)
1741 {
1742 tree type = elt->type;
1744 switch (TREE_CODE (type))
1745 {
1746 case RECORD_TYPE:
1747 {
1748 tree f;
1749 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
1750 if (TREE_CODE (f) == FIELD_DECL)
1751 {
1752 tree last = try_instantiate_multiple_fields (elt, f);
1754 if (last != f)
1755 {
1756 f = last;
1757 continue;
1758 }
1760 instantiate_missing_elements_1 (elt, f,
1761 canon_type_for_field
1762 (f, elt->element));
1763 }
1764 break;
1765 }
1767 case ARRAY_TYPE:
1768 {
1769 tree i, max, subtype;
1771 i = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1772 max = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
1773 subtype = TREE_TYPE (type);
1775 while (1)
1776 {
1777 instantiate_missing_elements_1 (elt, i, subtype);
1778 if (tree_int_cst_equal (i, max))
1779 break;
1780 i = int_const_binop (PLUS_EXPR, i, integer_one_node, true);
1781 }
1783 break;
1784 }
1786 case COMPLEX_TYPE:
1787 type = TREE_TYPE (type);
1788 instantiate_missing_elements_1 (elt, integer_zero_node, type);
1789 instantiate_missing_elements_1 (elt, integer_one_node, type);
1790 break;
1792 default:
1793 gcc_unreachable ();
1794 }
1795 }
1797 /* Return true if there is only one non-aggregate field in the record, TYPE.
1798 Return false otherwise. */
1800 static bool
1801 single_scalar_field_in_record_p (tree type)
1802 {
1803 int num_fields = 0;
1804 tree field;
1805 if (TREE_CODE (type) != RECORD_TYPE)
1806 return false;
1808 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1809 if (TREE_CODE (field) == FIELD_DECL)
1810 {
1811 num_fields++;
1813 if (num_fields == 2)
1814 return false;
1816 if (AGGREGATE_TYPE_P (TREE_TYPE (field)))
1817 return false;
1818 }
1820 return true;
1821 }
1823 /* Make one pass across an element tree deciding whether to perform block
1824 or element copies. If we decide on element copies, instantiate all
1825 elements. Return true if there are any instantiated sub-elements. */
1827 static bool
1828 decide_block_copy (struct sra_elt *elt)
1829 {
1830 struct sra_elt *c;
1831 bool any_inst;
1833 /* We shouldn't be invoked on groups of sub-elements as they must
1834 behave like their parent as far as block copy is concerned. */
1835 gcc_assert (!elt->is_group);
1837 /* If scalarization is disabled, respect it. */
1838 if (elt->cannot_scalarize)
1839 {
1840 elt->use_block_copy = 1;
1842 if (dump_file)
1843 {
1844 fputs ("Scalarization disabled for ", dump_file);
1845 dump_sra_elt_name (dump_file, elt);
1846 fputc ('\n', dump_file);
1847 }
1849 /* Disable scalarization of sub-elements. */
1850 for (c = elt->children; c; c = c->sibling)
1851 {
1852 c->cannot_scalarize = 1;
1853 decide_block_copy (c);
1854 }
1856 /* Groups behave like their parent. */
1857 for (c = elt->groups; c; c = c->sibling)
1858 {
1859 c->cannot_scalarize = 1;
1860 c->use_block_copy = 1;
1861 }
1863 return false;
1864 }
1866 /* Don't decide if we have no uses and no groups. */
1867 if (elt->n_uses == 0 && elt->n_copies == 0 && elt->groups == NULL)
1868 ;
1870 else if (!elt->is_scalar)
1871 {
1872 tree size_tree = TYPE_SIZE_UNIT (elt->type);
1873 bool use_block_copy = true;
1875 /* Tradeoffs for COMPLEX types pretty much always make it better
1876 to go ahead and split the components. */
1877 if (TREE_CODE (elt->type) == COMPLEX_TYPE)
1878 use_block_copy = false;
1880 /* Don't bother trying to figure out the rest if the structure is
1881 so large we can't do easy arithmetic. This also forces block
1882 copies for variable sized structures. */
1883 else if (host_integerp (size_tree, 1))
1885 unsigned HOST_WIDE_INT full_size, inst_size = 0;
1886 unsigned int max_size, max_count, inst_count, full_count;
1888 /* If the sra-max-structure-size parameter is 0, then the
1889 user has not overridden the parameter and we can choose a
1890 sensible default. */
1891 max_size = SRA_MAX_STRUCTURE_SIZE
1892 ? SRA_MAX_STRUCTURE_SIZE
1893 : MOVE_RATIO (optimize_function_for_speed_p (cfun)) * UNITS_PER_WORD;
1894 max_count = SRA_MAX_STRUCTURE_COUNT
1895 ? SRA_MAX_STRUCTURE_COUNT
1896 : MOVE_RATIO (optimize_function_for_speed_p (cfun));
1898 full_size = tree_low_cst (size_tree, 1);
1899 full_count = count_type_elements (elt->type, false);
1900 inst_count = sum_instantiated_sizes (elt, &inst_size);
1902 /* If there is only one scalar field in the record, don't block copy. */
1903 if (single_scalar_field_in_record_p (elt->type))
1904 use_block_copy = false;
1906 /* ??? What to do here. If there are two fields, and we've only
1907 instantiated one, then instantiating the other is clearly a win.
1908 If there are a large number of fields then the size of the copy
1909 is much more of a factor. */
1911 /* If the structure is small, and we've made copies, go ahead
1912 and instantiate, hoping that the copies will go away. */
1913 if (full_size <= max_size
1914 && (full_count - inst_count) <= max_count
1915 && elt->n_copies > elt->n_uses)
1916 use_block_copy = false;
1917 else if (inst_count * 100 >= full_count * SRA_FIELD_STRUCTURE_RATIO
1918 && inst_size * 100 >= full_size * SRA_FIELD_STRUCTURE_RATIO)
1919 use_block_copy = false;
1921 /* In order to avoid block copy, we have to be able to instantiate
1922 all elements of the type. See if this is possible. */
1923 if (!use_block_copy
1924 && (!can_completely_scalarize_p (elt)
1925 || !type_can_instantiate_all_elements (elt->type)))
1926 use_block_copy = true;
1927 }
1929 elt->use_block_copy = use_block_copy;
1931 /* Groups behave like their parent. */
1932 for (c = elt->groups; c; c = c->sibling)
1933 c->use_block_copy = use_block_copy;
1937 fprintf (dump_file, "Using %s for ",
1938 use_block_copy ? "block-copy" : "element-copy");
1939 dump_sra_elt_name (dump_file, elt);
1940 fputc ('\n', dump_file);
1943 if (!use_block_copy)
1944 {
1945 instantiate_missing_elements (elt);
1946 return true;
1947 }
1948 }
1950 any_inst = elt->replacement != NULL;
1952 for (c = elt->children; c ; c = c->sibling)
1953 any_inst |= decide_block_copy (c);
1955 return any_inst;
1956 }
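/* Added for illustration (numbers invented): with MOVE_RATIO of 4 on a
   32-bit target, the default max_size is 16 bytes and max_count is 4,
   so an 8-byte struct of two ints that is copied more often than it is
   used whole gets element copies, while a 64-byte array of sixteen
   ints keeps block copies. */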
1958 /* Entry point to phase 3. Instantiate scalar replacement variables. */
1960 static void
1961 decide_instantiations (void)
1962 {
1963 unsigned int i;
1964 bool cleared_any;
1965 bitmap_head done_head;
1966 bitmap_iterator bi;
1968 /* We cannot clear bits from a bitmap we're iterating over,
1969 so save up all the bits to clear until the end. */
1970 bitmap_initialize (&done_head, &bitmap_default_obstack);
1971 cleared_any = false;
1973 EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
1974 {
1975 tree var = referenced_var (i);
1976 struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
1977 if (elt)
1978 {
1979 decide_instantiation_1 (elt, 0, 0);
1980 if (!decide_block_copy (elt))
1981 elt = NULL;
1982 }
1983 if (!elt)
1984 {
1985 bitmap_set_bit (&done_head, i);
1986 cleared_any = true;
1987 }
1988 }
1990 if (cleared_any)
1991 {
1992 bitmap_and_compl_into (sra_candidates, &done_head);
1993 bitmap_and_compl_into (needs_copy_in, &done_head);
1994 }
1995 bitmap_clear (&done_head);
1997 mark_set_for_renaming (sra_candidates);
1999 if (dump_file)
2000 fputc ('\n', dump_file);
2001 }
2004 /* Phase Four: Update the function to match the replacements created. */
2006 /* Mark all the variables in virtual operands in all the statements in
2007 SEQ for renaming. */
2010 mark_all_v_defs_seq (gimple_seq seq)
2011 {
2012 gimple_stmt_iterator gsi;
2014 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
2015 update_stmt_if_modified (gsi_stmt (gsi));
2016 }
2018 /* Mark every replacement under ELT with TREE_NO_WARNING. */
2020 static void
2021 mark_no_warning (struct sra_elt *elt)
2022 {
2023 if (!elt->all_no_warning)
2024 {
2025 if (elt->replacement)
2026 TREE_NO_WARNING (elt->replacement) = 1;
2027 else
2028 {
2029 struct sra_elt *c;
2030 FOR_EACH_ACTUAL_CHILD (c, elt)
2031 mark_no_warning (c);
2032 }
2033 elt->all_no_warning = true;
2034 }
2035 }
2037 /* Build a single level component reference to ELT rooted at BASE. */
2039 static tree
2040 generate_one_element_ref (struct sra_elt *elt, tree base)
2041 {
2042 switch (TREE_CODE (TREE_TYPE (base)))
2043 {
2044 case RECORD_TYPE:
2045 {
2046 tree field = elt->element;
2048 /* We can't test elt->in_bitfld_block here because, when this is
2049 called from instantiate_element, we haven't set this field
2050 yet. */
2051 if (TREE_CODE (field) == BIT_FIELD_REF)
2052 {
2053 tree ret = unshare_expr (field);
2054 TREE_OPERAND (ret, 0) = base;
2055 return ret;
2056 }
2058 /* Watch out for compatible records with differing field lists. */
2059 if (DECL_FIELD_CONTEXT (field) != TYPE_MAIN_VARIANT (TREE_TYPE (base)))
2060 field = find_compatible_field (TREE_TYPE (base), field);
2062 return build3 (COMPONENT_REF, elt->type, base, field, NULL);
2063 }
2065 case ARRAY_TYPE:
2066 if (TREE_CODE (elt->element) == RANGE_EXPR)
2067 return build4 (ARRAY_RANGE_REF, elt->type, base,
2068 TREE_OPERAND (elt->element, 0), NULL, NULL);
2069 else
2070 return build4 (ARRAY_REF, elt->type, base, elt->element, NULL, NULL);
2072 case COMPLEX_TYPE:
2073 if (elt->element == integer_zero_node)
2074 return build1 (REALPART_EXPR, elt->type, base);
2075 else
2076 return build1 (IMAGPART_EXPR, elt->type, base);
2078 default:
2079 gcc_unreachable ();
2080 }
2081 }
2083 /* Build a full component reference to ELT rooted at its native variable. */
2085 static tree
2086 generate_element_ref (struct sra_elt *elt)
2087 {
2088 if (elt->parent)
2089 return generate_one_element_ref (elt, generate_element_ref (elt->parent));
2090 else
2091 return elt->element;
2092 }
2094 /* Return true if BF is a bit-field that we can handle like a scalar. */
2096 static bool
2097 scalar_bitfield_p (tree bf)
2098 {
2099 return (TREE_CODE (bf) == BIT_FIELD_REF
2100 && (is_gimple_reg (TREE_OPERAND (bf, 0))
2101 || (TYPE_MODE (TREE_TYPE (TREE_OPERAND (bf, 0))) != BLKmode
2102 && (!TREE_SIDE_EFFECTS (TREE_OPERAND (bf, 0))
2103 || (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE
2104 (TREE_OPERAND (bf, 0))))
2105 <= BITS_PER_WORD)))));
2106 }
2108 /* Create an assignment statement from SRC to DST. */
2110 static gimple_seq
2111 sra_build_assignment (tree dst, tree src)
2112 {
2113 gimple stmt;
2114 gimple_seq seq = NULL, seq2 = NULL;
2115 /* Turning BIT_FIELD_REFs into bit operations enables other passes
2116 to do a much better job at optimizing the code.
2117 From dst = BIT_FIELD_REF <var, sz, off> we produce
2119 SR.1 = (scalar type) var;
2120 SR.2 = SR.1 >> off;
2121 SR.3 = SR.2 & ((1 << sz) - 1);
2122 ... possible sign extension of SR.3 ...
2123 dst = (destination type) SR.3;
2124 */
2125 if (scalar_bitfield_p (src))
2126 {
2127 tree var, shift, width;
2128 tree utype, stype;
2129 bool unsignedp = (INTEGRAL_TYPE_P (TREE_TYPE (src))
2130 ? TYPE_UNSIGNED (TREE_TYPE (src)) : true);
2131 struct gimplify_ctx gctx;
2133 var = TREE_OPERAND (src, 0);
2134 width = TREE_OPERAND (src, 1);
2135 /* The offset needs to be adjusted to a right shift quantity
2136 depending on the endianness. */
2137 if (BYTES_BIG_ENDIAN)
2138 {
2139 tree tmp = size_binop (PLUS_EXPR, width, TREE_OPERAND (src, 2));
2140 shift = size_binop (MINUS_EXPR, TYPE_SIZE (TREE_TYPE (var)), tmp);
2141 }
2142 else
2143 shift = TREE_OPERAND (src, 2);
2145 /* In weird cases we have non-integral types for the source or
2146 destination object.
2147 ??? For unknown reasons we also want an unsigned scalar type. */
2148 stype = TREE_TYPE (var);
2149 if (!INTEGRAL_TYPE_P (stype))
2150 stype = lang_hooks.types.type_for_size (TREE_INT_CST_LOW
2151 (TYPE_SIZE (stype)), 1);
2152 else if (!TYPE_UNSIGNED (stype))
2153 stype = unsigned_type_for (stype);
2155 utype = TREE_TYPE (dst);
2156 if (!INTEGRAL_TYPE_P (utype))
2157 utype = lang_hooks.types.type_for_size (TREE_INT_CST_LOW
2158 (TYPE_SIZE (utype)), 1);
2159 else if (!TYPE_UNSIGNED (utype))
2160 utype = unsigned_type_for (utype);
2162 /* Convert the base var of the BIT_FIELD_REF to the scalar type
2163 we use for computation if we cannot use it directly. */
2164 if (INTEGRAL_TYPE_P (TREE_TYPE (var)))
2165 var = fold_convert (stype, var);
2167 var = fold_build1 (VIEW_CONVERT_EXPR, stype, var);
2169 if (!integer_zerop (shift))
2170 var = fold_build2 (RSHIFT_EXPR, stype, var, shift);
2172 /* If we need a masking operation, produce one. */
2173 if (TREE_INT_CST_LOW (width) == TYPE_PRECISION (stype))
2174 ;
2175 else
2176 {
2177 tree one = build_int_cst_wide (stype, 1, 0);
2178 tree mask = int_const_binop (LSHIFT_EXPR, one, width, 0);
2179 mask = int_const_binop (MINUS_EXPR, mask, one, 0);
2180 var = fold_build2 (BIT_AND_EXPR, stype, var, mask);
2181 }
2183 /* After shifting and masking, convert to the target type. */
2184 var = fold_convert (utype, var);
2186 /* Perform sign extension, if required.
2187 ??? This should never be necessary. */
2188 if (!unsignedp)
2189 {
2190 tree signbit = int_const_binop (LSHIFT_EXPR,
2191 build_int_cst_wide (utype, 1, 0),
2192 size_binop (MINUS_EXPR, width,
2193 bitsize_int (1)), 0);
2195 var = fold_build2 (BIT_XOR_EXPR, utype, var, signbit);
2196 var = fold_build2 (MINUS_EXPR, utype, var, signbit);
2197 }
2199 /* fold_build3 (BIT_FIELD_REF, ...) sometimes returns a cast. */
2200 STRIP_NOPS (dst);
2202 /* Finally, move and convert to the destination. */
2203 if (INTEGRAL_TYPE_P (TREE_TYPE (dst)))
2204 var = fold_convert (TREE_TYPE (dst), var);
2206 var = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (dst), var);
2208 push_gimplify_context (&gctx);
2209 gctx.allow_rhs_cond_expr = true;
2211 gimplify_assign (dst, var, &seq);
2213 if (gimple_referenced_vars (cfun))
2214 for (var = gctx.temps; var; var = TREE_CHAIN (var))
2216 add_referenced_var (var);
2217 mark_sym_for_renaming (var);
2219 pop_gimplify_context (NULL);
2221 return seq;
2222 }
2224 /* fold_build3 (BIT_FIELD_REF, ...) sometimes returns a cast. */
2225 if (CONVERT_EXPR_P (dst))
2226 {
2227 STRIP_NOPS (dst);
2228 src = fold_convert (TREE_TYPE (dst), src);
2229 }
2230 /* It was hoped that we could perform some type sanity checking
2231 here, but since front-ends can emit accesses of fields in types
2232 different from their nominal types and copy structures containing
2233 them as a whole, we'd have to handle such differences here.
2234 Since such accesses under different types require compatibility
2235 anyway, there's little point in making tests and/or adding
2236 conversions to ensure the types of src and dst are the same.
2237 So we just assume type differences at this point are ok.
2238 The only exception we make here is for pointer types, which can differ
2239 in e.g. structurally equal, but non-identical RECORD_TYPEs. */
2240 else if (POINTER_TYPE_P (TREE_TYPE (dst))
2241 && !useless_type_conversion_p (TREE_TYPE (dst), TREE_TYPE (src)))
2242 src = fold_convert (TREE_TYPE (dst), src);
2244 /* ??? Only call the gimplifier if we need to. Otherwise we may
2245 end up substituting with DECL_VALUE_EXPR - see PR37380. */
2246 if (!handled_component_p (src)
2247 && !SSA_VAR_P (src))
2248 {
2249 src = force_gimple_operand (src, &seq2, false, NULL_TREE);
2250 gimple_seq_add_seq (&seq, seq2);
2251 }
2252 stmt = gimple_build_assign (dst, src);
2253 gimple_seq_add_stmt (&seq, stmt);
2254 return seq;
2255 }
2257 /* BIT_FIELD_REFs must not be shared. sra_build_elt_assignment()
2258 takes care of assignments, but we must create copies for uses. */
2259 #define REPLDUP(t) (TREE_CODE (t) != BIT_FIELD_REF ? (t) : unshare_expr (t))
2261 /* Emit an assignment from SRC to DST, but if DST is a scalarizable
2262 BIT_FIELD_REF, turn it into bit operations. */
2264 static gimple_seq
2265 sra_build_bf_assignment (tree dst, tree src)
2266 {
2267 tree var, type, utype, tmp, tmp2, tmp3;
2268 gimple_seq seq;
2269 gimple stmt;
2270 tree cst, cst2, mask;
2271 tree minshift, maxshift;
2273 if (TREE_CODE (dst) != BIT_FIELD_REF)
2274 return sra_build_assignment (dst, src);
2276 var = TREE_OPERAND (dst, 0);
2278 if (!scalar_bitfield_p (dst))
2279 return sra_build_assignment (REPLDUP (dst), src);
2283 cst = fold_convert (bitsizetype, TREE_OPERAND (dst, 2));
2284 cst2 = size_binop (PLUS_EXPR,
2285 fold_convert (bitsizetype, TREE_OPERAND (dst, 1)),
2286 cst);
2288 if (BYTES_BIG_ENDIAN)
2289 {
2290 maxshift = size_binop (MINUS_EXPR, TYPE_SIZE (TREE_TYPE (var)), cst);
2291 minshift = size_binop (MINUS_EXPR, TYPE_SIZE (TREE_TYPE (var)), cst2);
2292 }
2293 else
2294 {
2295 maxshift = cst2;
2296 minshift = cst;
2297 }
2299 type = TREE_TYPE (var);
2300 if (!INTEGRAL_TYPE_P (type))
2301 type = lang_hooks.types.type_for_size
2302 (TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (var))), 1);
2303 if (TYPE_UNSIGNED (type))
2304 utype = type;
2305 else
2306 utype = unsigned_type_for (type);
2308 mask = build_int_cst_wide (utype, 1, 0);
2309 if (TREE_INT_CST_LOW (maxshift) == TYPE_PRECISION (utype))
2310 cst = build_int_cst_wide (utype, 0, 0);
2311 else
2312 cst = int_const_binop (LSHIFT_EXPR, mask, maxshift, true);
2313 if (integer_zerop (minshift))
2314 cst2 = mask;
2315 else
2316 cst2 = int_const_binop (LSHIFT_EXPR, mask, minshift, true);
2317 mask = int_const_binop (MINUS_EXPR, cst, cst2, true);
2318 mask = fold_build1 (BIT_NOT_EXPR, utype, mask);
2320 if (TYPE_MAIN_VARIANT (utype) != TYPE_MAIN_VARIANT (TREE_TYPE (var))
2321 && !integer_zerop (mask))
2324 if (!is_gimple_variable (tmp))
2325 tmp = unshare_expr (var);
2327 TREE_NO_WARNING (var) = true;
2329 tmp2 = make_rename_temp (utype, "SR");
2331 if (INTEGRAL_TYPE_P (TREE_TYPE (var)))
2332 tmp = fold_convert (utype, tmp);
2334 tmp = fold_build1 (VIEW_CONVERT_EXPR, utype, tmp);
2336 stmt = gimple_build_assign (tmp2, tmp);
2337 gimple_seq_add_stmt (&seq, stmt);
2342 if (!integer_zerop (mask))
2344 tmp = make_rename_temp (utype, "SR");
2345 stmt = gimple_build_assign (tmp, fold_build2 (BIT_AND_EXPR, utype,
2347 gimple_seq_add_stmt (&seq, stmt);
2352 if (is_gimple_reg (src) && INTEGRAL_TYPE_P (TREE_TYPE (src)))
2354 else if (INTEGRAL_TYPE_P (TREE_TYPE (src)))
2357 tmp2 = make_rename_temp (TREE_TYPE (src), "SR");
2358 tmp_seq = sra_build_assignment (tmp2, src);
2359 gimple_seq_add_seq (&seq, tmp_seq);
2364 tmp2 = make_rename_temp
2365 (lang_hooks.types.type_for_size
2366 (TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (src))),
2368 tmp_seq = sra_build_assignment (tmp2, fold_build1 (VIEW_CONVERT_EXPR,
2369 TREE_TYPE (tmp2), src));
2370 gimple_seq_add_seq (&seq, tmp_seq);
2373 if (!TYPE_UNSIGNED (TREE_TYPE (tmp2)))
2376 tree ut = unsigned_type_for (TREE_TYPE (tmp2));
2377 tmp3 = make_rename_temp (ut, "SR");
2378 tmp2 = fold_convert (ut, tmp2);
2379 tmp_seq = sra_build_assignment (tmp3, tmp2);
2380 gimple_seq_add_seq (&seq, tmp_seq);
2382 tmp2 = fold_build1 (BIT_NOT_EXPR, utype, mask);
2383 tmp2 = int_const_binop (RSHIFT_EXPR, tmp2, minshift, true);
2384 tmp2 = fold_convert (ut, tmp2);
2385 tmp2 = fold_build2 (BIT_AND_EXPR, ut, tmp3, tmp2);
2389 tmp3 = make_rename_temp (ut, "SR");
2390 tmp_seq = sra_build_assignment (tmp3, tmp2);
2391 gimple_seq_add_seq (&seq, tmp_seq);
2397 if (TYPE_MAIN_VARIANT (TREE_TYPE (tmp2)) != TYPE_MAIN_VARIANT (utype))
2400 tmp3 = make_rename_temp (utype, "SR");
2401 tmp2 = fold_convert (utype, tmp2);
2402 tmp_seq = sra_build_assignment (tmp3, tmp2);
2403 gimple_seq_add_seq (&seq, tmp_seq);
2407 if (!integer_zerop (minshift))
2409 tmp3 = make_rename_temp (utype, "SR");
2410 stmt = gimple_build_assign (tmp3, fold_build2 (LSHIFT_EXPR, utype,
2412 gimple_seq_add_stmt (&seq, stmt);
2416 if (utype != TREE_TYPE (var))
2417 tmp3 = make_rename_temp (utype, "SR");
2420 stmt = gimple_build_assign (tmp3, fold_build2 (BIT_IOR_EXPR, utype,
2422 gimple_seq_add_stmt (&seq, stmt);
2426 if (TREE_TYPE (var) == type)
2427 stmt = gimple_build_assign (var, fold_convert (type, tmp3));
2429 stmt = gimple_build_assign (var, fold_build1 (VIEW_CONVERT_EXPR,
2430 TREE_TYPE (var), tmp3));
2431 gimple_seq_add_stmt (&seq, stmt);
/* Expand an assignment of SRC to the scalarized representation of
   ELT.  If it is a field group, try to widen the assignment to cover
   the full variable.  */

static gimple_seq
sra_build_elt_assignment (struct sra_elt *elt, tree src)
{
  tree dst = elt->replacement;
  tree var, tmp, cst, cst2;
  gimple stmt;
  gimple_seq seq = NULL, tmp_seq;

  if (TREE_CODE (dst) != BIT_FIELD_REF
      || !elt->in_bitfld_block)
    return sra_build_assignment (REPLDUP (dst), src);

  var = TREE_OPERAND (dst, 0);

  /* Try to widen the assignment to the entire variable.
     We need the source to be a BIT_FIELD_REF as well, such that, for
     BIT_FIELD_REF<d,sz,dp> = BIT_FIELD_REF<s,sz,sp>,
     by design, conditions are met such that we can turn it into
     d = BIT_FIELD_REF<s,dw,sp-dp>.  */
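/* For example (a sketch): with sz 8, dp 0, sp 8 and a 16-bit D, the
   assignment BIT_FIELD_REF<d,8,0> = BIT_FIELD_REF<s,8,8> can, per the
   comment above, be widened to d = BIT_FIELD_REF<s,16,8>, on the
   premise that the grouping machinery only creates in_bitfld_block == 2
   elements when the surrounding bits line up this way.  */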
  if (elt->in_bitfld_block == 2
      && TREE_CODE (src) == BIT_FIELD_REF)
    {
      tmp = src;
      cst = TYPE_SIZE (TREE_TYPE (var));
      cst2 = size_binop (MINUS_EXPR, TREE_OPERAND (src, 2),
			 TREE_OPERAND (dst, 2));

      src = TREE_OPERAND (src, 0);

      /* Avoid full-width bit-fields.  */
      if (integer_zerop (cst2)
	  && tree_int_cst_equal (cst, TYPE_SIZE (TREE_TYPE (src))))
	{
	  if (INTEGRAL_TYPE_P (TREE_TYPE (src))
	      && !TYPE_UNSIGNED (TREE_TYPE (src)))
	    src = fold_convert (unsigned_type_for (TREE_TYPE (src)), src);

	  /* If a single conversion won't do, we'll need a statement
	     list.  */
	  if (TYPE_MAIN_VARIANT (TREE_TYPE (var))
	      != TYPE_MAIN_VARIANT (TREE_TYPE (src)))
	    {
	      if (!INTEGRAL_TYPE_P (TREE_TYPE (src)))
		src = fold_build1 (VIEW_CONVERT_EXPR,
				   lang_hooks.types.type_for_size
				   (TREE_INT_CST_LOW
				    (TYPE_SIZE (TREE_TYPE (src))),
				    1), src);
	      gcc_assert (TYPE_UNSIGNED (TREE_TYPE (src)));

	      tmp = make_rename_temp (TREE_TYPE (src), "SR");
	      stmt = gimple_build_assign (tmp, src);
	      gimple_seq_add_stmt (&seq, stmt);

	      tmp_seq = sra_build_assignment (var,
					      fold_convert (TREE_TYPE (var),
							    tmp));
	      gimple_seq_add_seq (&seq, tmp_seq);

	      return seq;
	    }

	  src = fold_convert (TREE_TYPE (var), src);
	}
      else
	{
	  src = fold_convert (TREE_TYPE (var), tmp);
	}

      return sra_build_assignment (var, src);
    }

  return sra_build_bf_assignment (dst, src);
}
/* Generate a set of assignment statements in *SEQ_P to copy all
   instantiated elements under ELT to or from the equivalent structure
   rooted at EXPR.  COPY_OUT controls the direction of the copy, with
   true meaning to copy out of EXPR into ELT.  */
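
/* For instance (a sketch, using dump-style names): for a candidate
   "struct { int x; int y; } p" whose fields were replaced by the
   scalars p$x and p$y, a copy-in (COPY_OUT true) from EXPR == p
   expands to
     p$x = p.x;  p$y = p.y;
   while a copy-out (COPY_OUT false) expands to
     p.x = p$x;  p.y = p$y;  */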
static void
generate_copy_inout (struct sra_elt *elt, bool copy_out, tree expr,
		     gimple_seq *seq_p)
{
  struct sra_elt *c;
  gimple_seq tmp_seq;
  tree t;

  if (!copy_out && TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == COMPLEX_TYPE)
    {
      tree r, i;

      c = lookup_element (elt, integer_zero_node, NULL, NO_INSERT);
      r = c->replacement;
      c = lookup_element (elt, integer_one_node, NULL, NO_INSERT);
      i = c->replacement;

      t = build2 (COMPLEX_EXPR, elt->type, r, i);
      tmp_seq = sra_build_bf_assignment (expr, t);
      SSA_NAME_DEF_STMT (expr) = gimple_seq_last_stmt (tmp_seq);
      gimple_seq_add_seq (seq_p, tmp_seq);
    }
  else if (elt->replacement)
    {
      if (copy_out)
	tmp_seq = sra_build_elt_assignment (elt, expr);
      else
	tmp_seq = sra_build_bf_assignment (expr, REPLDUP (elt->replacement));
      gimple_seq_add_seq (seq_p, tmp_seq);
    }
  else
    FOR_EACH_ACTUAL_CHILD (c, elt)
      {
	t = generate_one_element_ref (c, unshare_expr (expr));
	generate_copy_inout (c, copy_out, t, seq_p);
      }
}
/* Generate a set of assignment statements in *SEQ_P to copy all
   instantiated elements under SRC to their counterparts under DST.
   There must be a 1-1 correspondence of instantiated elements.  */
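
/* For example (a sketch): for "d = s" where both sides are fully
   instantiated structs with fields x and y, this emits the pair-wise
   scalar copies
     d$x = s$x;  d$y = s$y;
   in place of the aggregate assignment.  */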
static void
generate_element_copy (struct sra_elt *dst, struct sra_elt *src,
		       gimple_seq *seq_p)
{
  struct sra_elt *dc, *sc;

  FOR_EACH_ACTUAL_CHILD (dc, dst)
    {
      sc = lookup_element (src, dc->element, NULL, NO_INSERT);
      if (!sc && dc->in_bitfld_block == 2)
	{
	  struct sra_elt *dcs;

	  FOR_EACH_ACTUAL_CHILD (dcs, dc)
	    {
	      sc = lookup_element (src, dcs->element, NULL, NO_INSERT);
	      gcc_assert (sc);
	      generate_element_copy (dcs, sc, seq_p);
	    }

	  continue;
	}

      /* If DST and SRC are structs with the same elements, but do not have
	 the same TYPE_MAIN_VARIANT, then lookup of DST FIELD_DECL in SRC
	 will fail.  Try harder by finding the corresponding FIELD_DECL
	 in SRC.  */
      if (!sc)
	{
	  tree f;

	  gcc_assert (useless_type_conversion_p (dst->type, src->type));
	  gcc_assert (TREE_CODE (dc->element) == FIELD_DECL);
	  for (f = TYPE_FIELDS (src->type); f ; f = TREE_CHAIN (f))
	    if (simple_cst_equal (DECL_FIELD_OFFSET (f),
				  DECL_FIELD_OFFSET (dc->element)) > 0
		&& simple_cst_equal (DECL_FIELD_BIT_OFFSET (f),
				     DECL_FIELD_BIT_OFFSET (dc->element)) > 0
		&& simple_cst_equal (DECL_SIZE (f),
				     DECL_SIZE (dc->element)) > 0
		&& (useless_type_conversion_p (TREE_TYPE (dc->element),
					       TREE_TYPE (f))
		    || (POINTER_TYPE_P (TREE_TYPE (dc->element))
			&& POINTER_TYPE_P (TREE_TYPE (f)))))
	      break;
	  gcc_assert (f != NULL_TREE);
	  sc = lookup_element (src, f, NULL, NO_INSERT);
	}

      generate_element_copy (dc, sc, seq_p);
    }

  if (dst->replacement)
    {
      gimple_seq tmp_seq;

      gcc_assert (src->replacement);

      tmp_seq = sra_build_elt_assignment (dst, REPLDUP (src->replacement));
      gimple_seq_add_seq (seq_p, tmp_seq);
    }
}
/* Generate a set of assignment statements in *SEQ_P to zero all
   instantiated elements under ELT.  In addition, do not assign to
   elements that have been marked VISITED, but do reset the visited
   flag; this allows easy coordination with generate_element_init.  */
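
/* For example (a sketch): given "struct { int a; int b; } s" with both
   fields instantiated, an initializer mentioning only .a makes
   generate_element_init mark s$a VISITED and emit "s$a = 1"; this
   function then emits only "s$b = 0" and merely clears the VISITED
   flag on s$a rather than storing to it twice.  */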
static void
generate_element_zero (struct sra_elt *elt, gimple_seq *seq_p)
{
  struct sra_elt *c;

  if (elt->visited)
    {
      elt->visited = false;
      return;
    }

  if (!elt->in_bitfld_block)
    FOR_EACH_ACTUAL_CHILD (c, elt)
      generate_element_zero (c, seq_p);

  if (elt->replacement)
    {
      tree t;
      gimple_seq tmp_seq;

      gcc_assert (elt->is_scalar);
      t = fold_convert (elt->type, integer_zero_node);

      tmp_seq = sra_build_elt_assignment (elt, t);
      gimple_seq_add_seq (seq_p, tmp_seq);
    }
}
/* Generate an assignment VAR = INIT, where INIT may need gimplification.
   Add the result to *SEQ_P.  */

static void
generate_one_element_init (struct sra_elt *elt, tree init, gimple_seq *seq_p)
{
  gimple_seq tmp_seq = sra_build_elt_assignment (elt, init);
  gimple_seq_add_seq (seq_p, tmp_seq);
}
/* Generate a set of assignment statements in *SEQ_P to set all instantiated
   elements under ELT with the contents of the initializer INIT.  In addition,
   mark all assigned elements VISITED; this allows easy coordination with
   generate_element_zero.  Return false if we found a case we couldn't
   handle.  */
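
/* For example (a sketch): for "int a[4] = { [0 ... 2] = 5 };" the
   CONSTRUCTOR case below walks the RANGE_EXPR index [0..2] one element
   at a time, emitting a$0 = 5, a$1 = 5 and a$2 = 5 for whichever of
   those elements were instantiated.  */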
static bool
generate_element_init_1 (struct sra_elt *elt, tree init, gimple_seq *seq_p)
{
  bool result = true;
  enum tree_code init_code;
  struct sra_elt *sub;
  tree t;
  unsigned HOST_WIDE_INT idx;
  tree value, purpose;

  /* We can be passed DECL_INITIAL of a static variable.  It might have a
     conversion, which we strip off here.  */
  STRIP_USELESS_TYPE_CONVERSION (init);
  init_code = TREE_CODE (init);

  if (elt->is_scalar)
    {
      if (elt->replacement)
	{
	  generate_one_element_init (elt, init, seq_p);
	  elt->visited = true;
	}
      return result;
    }

  switch (init_code)
    {
    case COMPLEX_CST:
    case COMPLEX_EXPR:
      FOR_EACH_ACTUAL_CHILD (sub, elt)
	{
	  if (sub->element == integer_zero_node)
	    t = (init_code == COMPLEX_EXPR
		 ? TREE_OPERAND (init, 0) : TREE_REALPART (init));
	  else
	    t = (init_code == COMPLEX_EXPR
		 ? TREE_OPERAND (init, 1) : TREE_IMAGPART (init));
	  result &= generate_element_init_1 (sub, t, seq_p);
	}
      break;

    case CONSTRUCTOR:
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), idx, purpose, value)
	{
	  /* Array constructors are routinely created with NULL indices.  */
	  if (purpose == NULL_TREE)
	    {
	      result = false;
	      break;
	    }
	  if (TREE_CODE (purpose) == RANGE_EXPR)
	    {
	      tree lower = TREE_OPERAND (purpose, 0);
	      tree upper = TREE_OPERAND (purpose, 1);

	      while (1)
		{
		  sub = lookup_element (elt, lower, NULL, NO_INSERT);
		  if (sub != NULL)
		    result &= generate_element_init_1 (sub, value, seq_p);
		  if (tree_int_cst_equal (lower, upper))
		    break;
		  lower = int_const_binop (PLUS_EXPR, lower,
					   integer_one_node, true);
		}
	    }
	  else
	    {
	      sub = lookup_element (elt, purpose, NULL, NO_INSERT);
	      if (sub != NULL)
		result &= generate_element_init_1 (sub, value, seq_p);
	    }
	}
      break;

    default:
      elt->visited = true;
      result = false;
    }

  return result;
}
/* A wrapper function for generate_element_init_1 that handles cleanup after
   gimplification.  */

static bool
generate_element_init (struct sra_elt *elt, tree init, gimple_seq *seq_p)
{
  bool ret;
  struct gimplify_ctx gctx;

  push_gimplify_context (&gctx);
  ret = generate_element_init_1 (elt, init, seq_p);
  pop_gimplify_context (NULL);

  /* The replacement can expose previously unreferenced variables.  */
  if (ret && *seq_p)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start (*seq_p); !gsi_end_p (i); gsi_next (&i))
	find_new_referenced_vars (gsi_stmt (i));
    }

  return ret;
}
/* Insert a gimple_seq SEQ on all the outgoing edges out of BB.  Note that
   if BB has more than one edge, SEQ will be replicated for each edge.
   Also, abnormal edges will be ignored.  */
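
/* For example (a sketch): if BB ends in a two-way branch with no
   abnormal successors, N_COPIES below ends up as 1, so the first edge
   receives gimple_seq_copy (SEQ) and the second receives SEQ itself;
   the original sequence is reused on the last normal edge and copied
   everywhere else.  */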
void
insert_edge_copies_seq (gimple_seq seq, basic_block bb)
{
  edge e;
  edge_iterator ei;
  unsigned n_copies = -1;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_ABNORMAL))
      n_copies++;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_ABNORMAL))
      gsi_insert_seq_on_edge (e, n_copies-- > 0 ? gimple_seq_copy (seq) : seq);
}
/* Helper function to insert SEQ before GSI, and set up line number info.  */

void
sra_insert_before (gimple_stmt_iterator *gsi, gimple_seq seq)
{
  gimple stmt = gsi_stmt (*gsi);

  if (gimple_has_location (stmt))
    annotate_all_with_location (seq, gimple_location (stmt));
  gsi_insert_seq_before (gsi, seq, GSI_SAME_STMT);
}

/* Similarly, but insert after GSI.  Handles insertion onto edges as well.  */

void
sra_insert_after (gimple_stmt_iterator *gsi, gimple_seq seq)
{
  gimple stmt = gsi_stmt (*gsi);

  if (gimple_has_location (stmt))
    annotate_all_with_location (seq, gimple_location (stmt));

  if (stmt_ends_bb_p (stmt))
    insert_edge_copies_seq (seq, gsi_bb (*gsi));
  else
    gsi_insert_seq_after (gsi, seq, GSI_SAME_STMT);
}

/* Similarly, but replace the statement at GSI.  */

static void
sra_replace (gimple_stmt_iterator *gsi, gimple_seq seq)
{
  sra_insert_before (gsi, seq);
  unlink_stmt_vdef (gsi_stmt (*gsi));
  gsi_remove (gsi, false);
  if (gsi_end_p (*gsi))
    *gsi = gsi_last (gsi_seq (*gsi));
  else
    gsi_prev (gsi);
}
/* Data structure that bitfield_overlaps_p fills in with information
   about the element passed in and how much of it overlaps with the
   bit-range passed to it.  */

struct bitfield_overlap_info
{
  /* The bit-length of an element.  */
  tree field_len;

  /* The bit-position of the element in its parent.  */
  tree field_pos;

  /* The number of bits of the element that overlap with the incoming
     bit range.  */
  tree overlap_len;

  /* The first bit of the element that overlaps with the incoming bit
     range.  */
  tree overlap_pos;
};
/* Return true if a BIT_FIELD_REF<(FLD->parent), BLEN, BPOS>
   expression (referenced as BF below) accesses any of the bits in FLD,
   false if it doesn't.  If DATA is non-null, its field_len and
   field_pos are filled in such that BIT_FIELD_REF<(FLD->parent),
   field_len, field_pos> (referenced as BFLD below) represents the
   entire field FLD->element, and BIT_FIELD_REF<BFLD, overlap_len,
   overlap_pos> represents the portion of the entire field that
   overlaps with BF.  */
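
/* A worked example (a sketch): for a field occupying bits [8, 24) of
   its parent (so FLEN 16, FPOS 8) and an incoming range BLEN 16,
   BPOS 16 covering bits [16, 32), this returns true and fills in
   field_len 16, field_pos 8, overlap_pos 8 and overlap_len 8: the
   upper half of the field, expressed relative to the field itself.  */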
static bool
bitfield_overlaps_p (tree blen, tree bpos, struct sra_elt *fld,
		     struct bitfield_overlap_info *data)
{
  tree flen, fpos;
  bool ret;

  if (TREE_CODE (fld->element) == FIELD_DECL)
    {
      flen = fold_convert (bitsizetype, DECL_SIZE (fld->element));
      fpos = fold_convert (bitsizetype, DECL_FIELD_OFFSET (fld->element));
      fpos = size_binop (MULT_EXPR, fpos, bitsize_int (BITS_PER_UNIT));
      fpos = size_binop (PLUS_EXPR, fpos, DECL_FIELD_BIT_OFFSET (fld->element));
    }
  else if (TREE_CODE (fld->element) == BIT_FIELD_REF)
    {
      flen = fold_convert (bitsizetype, TREE_OPERAND (fld->element, 1));
      fpos = fold_convert (bitsizetype, TREE_OPERAND (fld->element, 2));
    }
  else if (TREE_CODE (fld->element) == INTEGER_CST)
    {
      tree domain_type = TYPE_DOMAIN (TREE_TYPE (fld->parent->element));
      flen = fold_convert (bitsizetype, TYPE_SIZE (fld->type));
      fpos = fold_convert (bitsizetype, fld->element);
      if (domain_type && TYPE_MIN_VALUE (domain_type))
	fpos = size_binop (MINUS_EXPR, fpos,
			   fold_convert (bitsizetype,
					 TYPE_MIN_VALUE (domain_type)));
      fpos = size_binop (MULT_EXPR, flen, fpos);
    }
  else
    gcc_unreachable ();

  gcc_assert (host_integerp (blen, 1)
	      && host_integerp (bpos, 1)
	      && host_integerp (flen, 1)
	      && host_integerp (fpos, 1));

  ret = ((!tree_int_cst_lt (fpos, bpos)
	  && tree_int_cst_lt (size_binop (MINUS_EXPR, fpos, bpos), blen))
	 || (!tree_int_cst_lt (bpos, fpos)
	     && tree_int_cst_lt (size_binop (MINUS_EXPR, bpos, fpos), flen)));

  if (!ret)
    return ret;

  if (data)
    {
      tree bend, fend;

      data->field_len = flen;
      data->field_pos = fpos;

      fend = size_binop (PLUS_EXPR, fpos, flen);
      bend = size_binop (PLUS_EXPR, bpos, blen);

      if (tree_int_cst_lt (bend, fend))
	data->overlap_len = size_binop (MINUS_EXPR, bend, fpos);
      else
	data->overlap_len = NULL;

      if (tree_int_cst_lt (fpos, bpos))
	{
	  data->overlap_pos = size_binop (MINUS_EXPR, bpos, fpos);
	  data->overlap_len = size_binop (MINUS_EXPR,
					  data->overlap_len
					  ? data->overlap_len
					  : data->field_len,
					  data->overlap_pos);
	}
      else
	data->overlap_pos = NULL;
    }

  return ret;
}
/* Add to *SEQ_P a sequence of statements that copies BLEN bits between
   VAR and the scalarized elements of ELT, starting at bit VPOS of VAR
   and at bit BPOS of ELT.  The direction of the copy is given by
   TO_VAR.  */
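
/* For instance (a sketch): copying BLEN 16 bits at BPOS 0 of an ELT
   whose instantiated fields f1 and f2 occupy bits [0, 8) and [8, 16)
   yields two sra_build_bf_assignment calls, one moving 8 bits between
   BIT_FIELD_REF <VAR, 8, VPOS> and f1's replacement and one moving
   8 bits between BIT_FIELD_REF <VAR, 8, VPOS + 8> and f2's.  */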
static void
sra_explode_bitfield_assignment (tree var, tree vpos, bool to_var,
				 gimple_seq *seq_p, tree blen, tree bpos,
				 struct sra_elt *elt)
{
  struct sra_elt *fld;
  struct bitfield_overlap_info flp;

  FOR_EACH_ACTUAL_CHILD (fld, elt)
    {
      tree flen, fpos;

      if (!bitfield_overlaps_p (blen, bpos, fld, &flp))
	continue;

      flen = flp.overlap_len ? flp.overlap_len : flp.field_len;
      fpos = flp.overlap_pos ? flp.overlap_pos : bitsize_int (0);

      if (fld->replacement)
	{
	  tree infld, invar, type;
	  gimple_seq st;

	  infld = fld->replacement;

	  type = unsigned_type_for (TREE_TYPE (infld));
	  if (TYPE_PRECISION (type) != TREE_INT_CST_LOW (flen))
	    type = build_nonstandard_integer_type (TREE_INT_CST_LOW (flen), 1);

	  if (TREE_CODE (infld) == BIT_FIELD_REF)
	    {
	      fpos = size_binop (PLUS_EXPR, fpos, TREE_OPERAND (infld, 2));
	      infld = TREE_OPERAND (infld, 0);
	    }
	  else if (BYTES_BIG_ENDIAN && DECL_P (fld->element)
		   && !tree_int_cst_equal (TYPE_SIZE (TREE_TYPE (infld)),
					   DECL_SIZE (fld->element)))
	    {
	      fpos = size_binop (PLUS_EXPR, fpos,
				 TYPE_SIZE (TREE_TYPE (infld)));
	      fpos = size_binop (MINUS_EXPR, fpos,
				 DECL_SIZE (fld->element));
	    }

	  infld = fold_build3 (BIT_FIELD_REF, type, infld, flen, fpos);

	  invar = size_binop (MINUS_EXPR, flp.field_pos, bpos);
	  if (flp.overlap_pos)
	    invar = size_binop (PLUS_EXPR, invar, flp.overlap_pos);
	  invar = size_binop (PLUS_EXPR, invar, vpos);

	  invar = fold_build3 (BIT_FIELD_REF, type, var, flen, invar);

	  if (to_var)
	    st = sra_build_bf_assignment (invar, infld);
	  else
	    st = sra_build_bf_assignment (infld, invar);

	  gimple_seq_add_seq (seq_p, st);
	}
      else
	{
	  tree sub = size_binop (MINUS_EXPR, flp.field_pos, bpos);
	  sub = size_binop (PLUS_EXPR, vpos, sub);
	  if (flp.overlap_pos)
	    sub = size_binop (PLUS_EXPR, sub, flp.overlap_pos);

	  sra_explode_bitfield_assignment (var, sub, to_var, seq_p,
					   flen, fpos, fld);
	}
    }
}
/* Add to *SEQ_BEFORE_P statements that copy scalarized members of ELT
   that overlap with BIT_FIELD_REF<(ELT->element), BLEN, BPOS> back
   into the full variable, and to *SEQ_AFTER_P, if non-NULL, statements
   that copy the (presumably modified) overlapping portions of the
   full variable back to the scalarized variables.  */

static void
sra_sync_for_bitfield_assignment (gimple_seq *seq_before_p,
				  gimple_seq *seq_after_p,
				  tree blen, tree bpos,
				  struct sra_elt *elt)
{
  struct sra_elt *fld;
  struct bitfield_overlap_info flp;

  FOR_EACH_ACTUAL_CHILD (fld, elt)
    if (bitfield_overlaps_p (blen, bpos, fld, &flp))
      {
	if (fld->replacement || (!flp.overlap_len && !flp.overlap_pos))
	  {
	    generate_copy_inout (fld, false, generate_element_ref (fld),
				 seq_before_p);
	    mark_no_warning (fld);
	    if (seq_after_p)
	      generate_copy_inout (fld, true, generate_element_ref (fld),
				   seq_after_p);
	  }
	else
	  {
	    tree flen = flp.overlap_len ? flp.overlap_len : flp.field_len;
	    tree fpos = flp.overlap_pos ? flp.overlap_pos : bitsize_int (0);

	    sra_sync_for_bitfield_assignment (seq_before_p, seq_after_p,
					      flen, fpos, fld);
	  }
      }
}
/* Scalarize a USE.  To recap, this is either a simple reference to ELT,
   if ELT is scalar, or some occurrence of ELT that requires a complete
   aggregate.  IS_OUTPUT is true if ELT is being modified.  */

static void
scalarize_use (struct sra_elt *elt, tree *expr_p, gimple_stmt_iterator *gsi,
	       bool is_output, bool use_all)
{
  gimple stmt = gsi_stmt (*gsi);
  tree bfexpr;

  if (elt->replacement)
    {
      tree replacement = elt->replacement;

      /* If we have a replacement, then updating the reference is as
	 simple as modifying the existing statement in place.  */
      if (is_output
	  && TREE_CODE (elt->replacement) == BIT_FIELD_REF
	  && is_gimple_reg (TREE_OPERAND (elt->replacement, 0))
	  && is_gimple_assign (stmt)
	  && gimple_assign_lhs_ptr (stmt) == expr_p)
	{
	  gimple_seq newseq;
	  /* RHS must be a single operand.  */
	  gcc_assert (gimple_assign_single_p (stmt));
	  newseq = sra_build_elt_assignment (elt, gimple_assign_rhs1 (stmt));
	  sra_replace (gsi, newseq);
	  return;
	}
      else if (!is_output
	       && TREE_CODE (elt->replacement) == BIT_FIELD_REF
	       && is_gimple_assign (stmt)
	       && gimple_assign_rhs1_ptr (stmt) == expr_p)
	{
	  tree tmp = make_rename_temp
	    (TREE_TYPE (gimple_assign_lhs (stmt)), "SR");
	  gimple_seq newseq = sra_build_assignment (tmp,
						    REPLDUP (elt->replacement));

	  sra_insert_before (gsi, newseq);
	  replacement = tmp;
	}

      update_stmt_if_modified (stmt);
      *expr_p = REPLDUP (replacement);
      update_stmt (stmt);
    }
  else if (use_all && is_output
	   && is_gimple_assign (stmt)
	   && TREE_CODE (bfexpr
			 = gimple_assign_lhs (stmt)) == BIT_FIELD_REF
	   && &TREE_OPERAND (bfexpr, 0) == expr_p
	   && INTEGRAL_TYPE_P (TREE_TYPE (bfexpr))
	   && TREE_CODE (TREE_TYPE (*expr_p)) == RECORD_TYPE)
    {
      gimple_seq seq_before = NULL;
      gimple_seq seq_after = NULL;
      tree blen = fold_convert (bitsizetype, TREE_OPERAND (bfexpr, 1));
      tree bpos = fold_convert (bitsizetype, TREE_OPERAND (bfexpr, 2));
      bool update = false;

      if (!elt->use_block_copy)
	{
	  tree type = TREE_TYPE (bfexpr);
	  tree var = make_rename_temp (type, "SR"), tmp, vpos;
	  gimple st;

	  gimple_assign_set_lhs (stmt, var);
	  update = true;

	  if (!TYPE_UNSIGNED (type))
	    {
	      type = unsigned_type_for (type);
	      tmp = make_rename_temp (type, "SR");
	      st = gimple_build_assign (tmp, fold_convert (type, var));
	      gimple_seq_add_stmt (&seq_after, st);
	      var = tmp;
	    }

	  /* If VAR is wider than BLEN bits, it is padded at the
	     most-significant end.  We want to set VPOS such that
	     <BIT_FIELD_REF VAR BLEN VPOS> would refer to the
	     least-significant BLEN bits of VAR.  */
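	  /* Concretely (a sketch): for a 32-bit VAR and BLEN 12, VPOS
	     becomes 20 on a big-endian target and 0 on a little-endian
	     one, so the BIT_FIELD_REF names the least-significant 12
	     bits either way.  */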
	  if (BYTES_BIG_ENDIAN)
	    vpos = size_binop (MINUS_EXPR, TYPE_SIZE (type), blen);
	  else
	    vpos = bitsize_int (0);
	  sra_explode_bitfield_assignment
	    (var, vpos, false, &seq_after, blen, bpos, elt);
	}
      else
	sra_sync_for_bitfield_assignment
	  (&seq_before, &seq_after, blen, bpos, elt);

      if (seq_before)
	{
	  mark_all_v_defs_seq (seq_before);
	  sra_insert_before (gsi, seq_before);
	}
      if (seq_after)
	{
	  mark_all_v_defs_seq (seq_after);
	  sra_insert_after (gsi, seq_after);
	}
      if (update)
	update_stmt (stmt);
    }
  else if (use_all && !is_output
	   && is_gimple_assign (stmt)
	   && TREE_CODE (bfexpr
			 = gimple_assign_rhs1 (stmt)) == BIT_FIELD_REF
	   && &TREE_OPERAND (gimple_assign_rhs1 (stmt), 0) == expr_p
	   && INTEGRAL_TYPE_P (TREE_TYPE (bfexpr))
	   && TREE_CODE (TREE_TYPE (*expr_p)) == RECORD_TYPE)
    {
      gimple_seq seq = NULL;
      tree blen = fold_convert (bitsizetype, TREE_OPERAND (bfexpr, 1));
      tree bpos = fold_convert (bitsizetype, TREE_OPERAND (bfexpr, 2));
      bool update = false;

      if (!elt->use_block_copy)
	{
	  tree type = TREE_TYPE (bfexpr);
	  tree var = make_rename_temp (type, "SR"), tmp, vpos;
	  gimple st = NULL;

	  gimple_assign_set_rhs1 (stmt, var);
	  update = true;

	  if (!TYPE_UNSIGNED (type))
	    {
	      type = unsigned_type_for (type);
	      tmp = make_rename_temp (type, "SR");
	      st = gimple_build_assign (var,
					fold_convert (TREE_TYPE (var), tmp));
	      var = tmp;
	    }

	  gimple_seq_add_stmt (&seq,
			       gimple_build_assign
			         (var, build_int_cst_wide (type, 0, 0)));

	  /* If VAR is wider than BLEN bits, it is padded at the
	     most-significant end.  We want to set VPOS such that
	     <BIT_FIELD_REF VAR BLEN VPOS> would refer to the
	     least-significant BLEN bits of VAR.  */
	  if (BYTES_BIG_ENDIAN)
	    vpos = size_binop (MINUS_EXPR, TYPE_SIZE (type), blen);
	  else
	    vpos = bitsize_int (0);
	  sra_explode_bitfield_assignment
	    (var, vpos, true, &seq, blen, bpos, elt);

	  if (st)
	    gimple_seq_add_stmt (&seq, st);
	}
      else
	sra_sync_for_bitfield_assignment
	  (&seq, NULL, blen, bpos, elt);

      if (seq)
	{
	  mark_all_v_defs_seq (seq);
	  sra_insert_before (gsi, seq);
	}

      if (update)
	update_stmt (stmt);
    }
  else
    {
      gimple_seq seq = NULL;

      /* Otherwise we need some copies.  If ELT is being read, then we
	 want to store all (modified) sub-elements back into the
	 structure before the reference takes place.  If ELT is being
	 written, then we want to load the changed values back into
	 our shadow variables.  */
      /* ??? We don't check modified for reads, we just always write all of
	 the values.  We should be able to record the SSA number of the VOP
	 for which the values were last read.  If that number matches the
	 SSA number of the VOP in the current statement, then we needn't
	 emit an assignment.  This would also eliminate double writes when
	 a structure is passed as more than one argument to a function call.
	 This optimization would be most effective if sra_walk_function
	 processed the blocks in dominator order.  */

      generate_copy_inout (elt, is_output, generate_element_ref (elt), &seq);
      if (seq)
	{
	  mark_all_v_defs_seq (seq);
	  if (is_output)
	    sra_insert_after (gsi, seq);
	  else
	    {
	      sra_insert_before (gsi, seq);
	      if (use_all)
		mark_no_warning (elt);
	    }
	}
    }
}
/* Scalarize a COPY.  To recap, this is an assignment statement between
   two scalarizable references, LHS_ELT and RHS_ELT.  */

static void
scalarize_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
		gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  gimple stmt;

  if (lhs_elt->replacement && rhs_elt->replacement)
    {
      /* If we have two scalar operands, modify the existing statement.  */
      stmt = gsi_stmt (*gsi);

      /* See the commentary in sra_walk_function concerning
	 RETURN_EXPR, and why we should never see one here.  */
      gcc_assert (is_gimple_assign (stmt));
      gcc_assert (gimple_assign_copy_p (stmt));

      gimple_assign_set_lhs (stmt, lhs_elt->replacement);
      gimple_assign_set_rhs1 (stmt, REPLDUP (rhs_elt->replacement));
      update_stmt (stmt);
    }
  else if (lhs_elt->use_block_copy || rhs_elt->use_block_copy)
    {
      /* If either side requires a block copy, then sync the RHS back
	 to the original structure, leave the original assignment
	 statement (which will perform the block copy), then load the
	 LHS values out of its now-updated original structure.  */
      /* ??? Could perform a modified pair-wise element copy.  That
	 would at least allow those elements that are instantiated in
	 both structures to be optimized well.  */

      seq = NULL;
      generate_copy_inout (rhs_elt, false,
			   generate_element_ref (rhs_elt), &seq);
      if (seq)
	{
	  mark_all_v_defs_seq (seq);
	  sra_insert_before (gsi, seq);
	}

      seq = NULL;
      generate_copy_inout (lhs_elt, true,
			   generate_element_ref (lhs_elt), &seq);
      if (seq)
	{
	  mark_all_v_defs_seq (seq);
	  sra_insert_after (gsi, seq);
	}
    }
  else
    {
      /* Otherwise both sides must be fully instantiated.  In which
	 case perform pair-wise element assignments and replace the
	 original block copy statement.  */

      stmt = gsi_stmt (*gsi);
      update_stmt_if_modified (stmt);

      seq = NULL;
      generate_element_copy (lhs_elt, rhs_elt, &seq);
      gcc_assert (seq);
      mark_all_v_defs_seq (seq);
      sra_replace (gsi, seq);
    }
}
/* Scalarize an INIT.  To recap, this is an assignment to a scalarizable
   reference from some form of constructor: CONSTRUCTOR, COMPLEX_CST or
   COMPLEX_EXPR.  If RHS is NULL, it should be treated as an empty
   CONSTRUCTOR.  */

static void
scalarize_init (struct sra_elt *lhs_elt, tree rhs, gimple_stmt_iterator *gsi)
{
  bool result = true;
  gimple_seq seq = NULL, init_seq = NULL;

  /* Generate initialization statements for all members extant in the RHS.  */
  if (rhs)
    {
      /* Unshare the expression just in case this is from a decl's initial.  */
      rhs = unshare_expr (rhs);
      result = generate_element_init (lhs_elt, rhs, &init_seq);
    }

  if (!result)
    {
      /* If we failed to convert the entire initializer, then we must
	 leave the structure assignment in place and must load values
	 from the structure into the slots for which we did not find
	 constants.  The easiest way to do this is to generate a complete
	 copy-out, and then follow that with the constant assignments
	 that we were able to build.  DCE will clean things up.  */
      gimple_seq seq0 = NULL;
      generate_copy_inout (lhs_elt, true, generate_element_ref (lhs_elt),
			   &seq0);
      gimple_seq_add_seq (&seq0, seq);
      seq = seq0;
    }
  else
    {
      /* CONSTRUCTOR is defined such that any member not mentioned is assigned
	 a zero value.  Initialize the rest of the instantiated elements.  */
      generate_element_zero (lhs_elt, &seq);
      gimple_seq_add_seq (&seq, init_seq);
    }

  if (lhs_elt->use_block_copy || !result)
    {
      /* Since LHS is not fully instantiated, we must leave the structure
	 assignment in place.  Treating this case differently from a USE
	 exposes constants to later optimizations.  */
      if (seq)
	{
	  mark_all_v_defs_seq (seq);
	  sra_insert_after (gsi, seq);
	}
    }
  else
    {
      /* The LHS is fully instantiated.  The list of initializations
	 replaces the original structure assignment.  */
      gcc_assert (seq);
      update_stmt_if_modified (gsi_stmt (*gsi));
      mark_all_v_defs_seq (seq);
      sra_replace (gsi, seq);
    }
}
/* A subroutine of scalarize_ldst called via walk_tree.  Set TREE_THIS_NOTRAP
   on all INDIRECT_REFs.  */

static tree
mark_notrap (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      TREE_THIS_NOTRAP (t) = 1;
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;

  return NULL;
}

/* Scalarize a LDST.  To recap, this is an assignment between one scalarizable
   reference ELT and one non-scalarizable reference OTHER.  IS_OUTPUT is true
   if ELT is on the left-hand side.  */

static void
scalarize_ldst (struct sra_elt *elt, tree other,
		gimple_stmt_iterator *gsi, bool is_output)
{
  /* Shouldn't have gotten called for a scalar.  */
  gcc_assert (!elt->replacement);

  if (elt->use_block_copy)
    {
      /* Since ELT is not fully instantiated, we have to leave the
	 block copy in place.  Treat this as a USE.  */
      scalarize_use (elt, NULL, gsi, is_output, false);
    }
  else
    {
      /* The interesting case is when ELT is fully instantiated.  In this
	 case we can have each element stored/loaded directly to/from the
	 corresponding slot in OTHER.  This avoids a block copy.  */

      gimple_seq seq = NULL;
      gimple stmt = gsi_stmt (*gsi);

      update_stmt_if_modified (stmt);
      generate_copy_inout (elt, is_output, other, &seq);
      gcc_assert (seq);
      mark_all_v_defs_seq (seq);

      /* Preserve EH semantics.  */
      if (stmt_ends_bb_p (stmt))
	{
	  gimple_stmt_iterator si;
	  gimple first;
	  gimple_seq blist = NULL;
	  bool thr = stmt_could_throw_p (stmt);

	  /* If the last statement of this BB created an EH edge
	     before scalarization, we have to locate the first
	     statement that can throw in the new statement list and
	     use that as the last statement of this BB, such that EH
	     semantics is preserved.  All statements up to this one
	     are added to the same BB.  All other statements in the
	     list will be added to normal outgoing edges of the same
	     BB.  If they access any memory, it's the same memory, so
	     we can assume they won't throw.  */
	  si = gsi_start (seq);
	  for (first = gsi_stmt (si);
	       thr && !gsi_end_p (si) && !stmt_could_throw_p (first);
	       first = gsi_stmt (si))
	    {
	      gsi_remove (&si, false);
	      gimple_seq_add_stmt (&blist, first);
	    }

	  /* Extract the first remaining statement from SEQ, this is
	     the EH statement if there is one.  */
	  gsi_remove (&si, false);

	  if (blist)
	    sra_insert_before (gsi, blist);

	  /* Replace the old statement with this new representative.  */
	  gsi_replace (gsi, first, true);

	  if (!gsi_end_p (si))
	    {
	      /* If any reference would trap, then they all would.  And more
		 to the point, the first would.  Therefore none of the rest
		 will trap since the first didn't.  Indicate this by
		 iterating over the remaining statements and set
		 TREE_THIS_NOTRAP in all INDIRECT_REFs.  */
	      do
		{
		  walk_gimple_stmt (&si, NULL, mark_notrap, NULL);
		  gsi_next (&si);
		}
	      while (!gsi_end_p (si));

	      insert_edge_copies_seq (seq, gsi_bb (*gsi));
	    }
	}
      else
	sra_replace (gsi, seq);
    }
}
/* Generate initializations for all scalarizable parameters.  */

static void
scalarize_parms (void)
{
  gimple_seq seq = NULL;
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (needs_copy_in, 0, i, bi)
    {
      tree var = referenced_var (i);
      struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
      generate_copy_inout (elt, true, var, &seq);
    }

  if (seq)
    {
      insert_edge_copies_seq (seq, ENTRY_BLOCK_PTR);
      mark_all_v_defs_seq (seq);
    }
}

/* Entry point to phase 4.  Update the function to match replacements.  */

static void
scalarize_function (void)
{
  static const struct sra_walk_fns fns = {
    scalarize_use, scalarize_copy, scalarize_init, scalarize_ldst, false
  };

  sra_walk_function (&fns);
  scalarize_parms ();
  gsi_commit_edge_inserts ();
}
/* Debug helper function.  Print ELT in a nice human-readable format.  */

static void
dump_sra_elt_name (FILE *f, struct sra_elt *elt)
{
  if (elt->parent && TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
    {
      fputs (elt->element == integer_zero_node ? "__real__ " : "__imag__ ", f);
      dump_sra_elt_name (f, elt->parent);
    }
  else
    {
      if (elt->parent)
	dump_sra_elt_name (f, elt->parent);
      if (DECL_P (elt->element))
	{
	  if (TREE_CODE (elt->element) == FIELD_DECL)
	    fputc ('.', f);
	  print_generic_expr (f, elt->element, dump_flags);
	}
      else if (TREE_CODE (elt->element) == BIT_FIELD_REF)
	fprintf (f, "$B" HOST_WIDE_INT_PRINT_DEC "F" HOST_WIDE_INT_PRINT_DEC,
		 tree_low_cst (TREE_OPERAND (elt->element, 2), 1),
		 tree_low_cst (TREE_OPERAND (elt->element, 1), 1));
      else if (TREE_CODE (elt->element) == RANGE_EXPR)
	fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC ".." HOST_WIDE_INT_PRINT_DEC "]",
		 TREE_INT_CST_LOW (TREE_OPERAND (elt->element, 0)),
		 TREE_INT_CST_LOW (TREE_OPERAND (elt->element, 1)));
      else
	fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC "]",
		 TREE_INT_CST_LOW (elt->element));
    }
}

/* Likewise, but callable from the debugger.  */

void
debug_sra_elt_name (struct sra_elt *elt)
{
  dump_sra_elt_name (stderr, elt);
  fputc ('\n', stderr);
}

/* Initialize the type decomposition and instantiation caches, if they
   have not been allocated yet.  */

static void
sra_init_cache (void)
{
  if (sra_type_decomp_cache)
    return;

  sra_type_decomp_cache = BITMAP_ALLOC (NULL);
  sra_type_inst_cache = BITMAP_ALLOC (NULL);
}
/* Main entry point.  */

static unsigned int
tree_sra (void)
{
  /* Initialize local variables.  */
  gcc_obstack_init (&sra_obstack);
  sra_candidates = BITMAP_ALLOC (NULL);
  needs_copy_in = BITMAP_ALLOC (NULL);
  sra_init_cache ();
  sra_map = htab_create (101, sra_elt_hash, sra_elt_eq, NULL);

  /* Scan.  If we find anything, instantiate and scalarize.  */
  if (find_candidates_for_sra ())
    {
      scan_function ();
      decide_instantiations ();
      scalarize_function ();
    }

  /* Free allocated memory.  */
  htab_delete (sra_map);
  sra_map = NULL;
  BITMAP_FREE (sra_candidates);
  BITMAP_FREE (needs_copy_in);
  BITMAP_FREE (sra_type_decomp_cache);
  BITMAP_FREE (sra_type_inst_cache);
  obstack_free (&sra_obstack, NULL);

  return 0;
}
static unsigned int
tree_sra_early (void)
{
  unsigned int ret;

  early_sra = true;
  ret = tree_sra ();
  early_sra = false;

  return ret;
}

static bool
gate_sra (void)
{
  return flag_tree_sra != 0;
}
struct gimple_opt_pass pass_sra_early =
{
 {
  GIMPLE_PASS,
  "esra",				/* name */
  gate_sra,				/* gate */
  tree_sra_early,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_SRA,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
  | TODO_update_ssa
  | TODO_ggc_collect
  | TODO_verify_ssa			/* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_sra =
{
 {
  GIMPLE_PASS,
  "sra",				/* name */
  gate_sra,				/* gate */
  tree_sra,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_SRA,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  TODO_update_address_taken,		/* todo_flags_start */
  TODO_dump_func
  | TODO_update_ssa
  | TODO_ggc_collect
  | TODO_verify_ssa			/* todo_flags_finish */
 }
};