/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "tree.h"

/* These RTL headers are needed for basic-block.h.  */
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "langhooks.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "obstack.h"
#include "target.h"
/* expr.h is needed for MOVE_RATIO.  */
#include "expr.h"
#include "params.h"

/* The object of this pass is to replace a non-addressable aggregate with a
   set of independent variables.  Most of the time, all of these variables
   will be scalars.  But a secondary objective is to break up larger
   aggregates into smaller aggregates.  In the process we may find that some
   bits of the larger aggregate can be deleted as unreferenced.

   This substitution is done globally.  More localized substitutions would
   be the purview of a load-store motion pass.

   The optimization proceeds in phases:

     (1) Identify variables that have types that are candidates for
	 decomposition.

     (2) Scan the function looking for the ways these variables are used.
	 In particular we're interested in the number of times a variable
	 (or member) is needed as a complete unit, and the number of times
	 a variable (or member) is copied.

     (3) Based on the usage profile, instantiate substitution variables.

     (4) Scan the function making replacements.  */
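
/* As an illustrative sketch (not code from this pass), scalarization
   rewrites

     struct point { int x; int y; } p;
     p.x = 1;
     p.y = 2;
     return p.x + p.y;

   into something like

     int p$x = 1;
     int p$y = 2;
     return p$x + p$y;

   where p$x and p$y are replacement variables named by
   build_element_name below; the aggregate itself becomes dead if no
   use requires it as a complete unit.  */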

/* True if this is the "early" pass, before inlining.  */
static bool early_sra;

/* The set of aggregate variables that are candidates for scalarization.  */
static bitmap sra_candidates;

/* Set of scalarizable PARM_DECLs that need copy-in operations at the
   beginning of the function.  */
static bitmap needs_copy_in;

/* Sets of bit pairs that cache type decomposition and instantiation.  */
static bitmap sra_type_decomp_cache;
static bitmap sra_type_inst_cache;
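
/* The encoding pairs two bits per type: for a type whose main variant
   has TYPE_UID u, bit 2*u caches a positive answer and bit 2*u+1 a
   negative one; a type with neither bit set has not been analyzed
   yet.  (This describes the scheme used by the lookups below.)  */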

/* One of these structures is created for each candidate aggregate and
   each (accessed) member or group of members of such an aggregate.  */
struct sra_elt
{
  /* A tree of the elements.  Used when we want to traverse everything.  */
  struct sra_elt *parent;
  struct sra_elt *groups;
  struct sra_elt *children;
  struct sra_elt *sibling;

  /* If this element is a root, then this is the VAR_DECL.  If this is
     a sub-element, this is some token used to identify the reference.
     In the case of COMPONENT_REF, this is the FIELD_DECL.  In the case
     of an ARRAY_REF, this is the (constant) index.  In the case of an
     ARRAY_RANGE_REF, this is the (constant) RANGE_EXPR.  In the case
     of a complex number, this is a zero or one.  */
  tree element;

  /* The type of the element.  */
  tree type;

  /* A VAR_DECL, for any sub-element we've decided to replace.  */
  tree replacement;

  /* The number of times the element is referenced as a whole.  I.e.
     given "a.b.c", this would be incremented for C, but not for A or B.  */
  unsigned int n_uses;

  /* The number of times the element is copied to or from another
     scalarizable element.  */
  unsigned int n_copies;

  /* True if TYPE is scalar.  */
  bool is_scalar;

  /* True if this element is a group of members of its parent.  */
  bool is_group;

  /* True if we saw something about this element that prevents scalarization,
     such as non-constant indexing.  */
  bool cannot_scalarize;

  /* True if we've decided that structure-to-structure assignment
     should happen via memcpy and not per-element.  */
  bool use_block_copy;

  /* True if everything under this element has been marked TREE_NO_WARNING.  */
  bool all_no_warning;

  /* A flag for use with/after random access traversals.  */
  bool visited;

  /* True if there is BIT_FIELD_REF on the lhs with a vector.  */
  bool is_vector_lhs;

  /* 1 if the element is a field that is part of a block, 2 if the field
     is the block itself, 0 if it's neither.  */
  char in_bitfld_block;
};
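
/* For a candidate "struct { int i; _Complex float c; } x", the element
   tree would look like (a sketch):

     x (VAR_DECL)
       +- i (FIELD_DECL)
       +- c (FIELD_DECL)
	  +- 0 (real part)
	  +- 1 (imaginary part)

   where the 0/1 children use integer_zero_node and integer_one_node as
   their identifying tokens, as described above.  */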

#define IS_ELEMENT_FOR_GROUP(ELEMENT) (TREE_CODE (ELEMENT) == RANGE_EXPR)

#define FOR_EACH_ACTUAL_CHILD(CHILD, ELT)			\
  for ((CHILD) = (ELT)->is_group				\
		 ? next_child_for_group (NULL, (ELT))		\
		 : (ELT)->children;				\
       (CHILD);							\
       (CHILD) = (ELT)->is_group				\
		 ? next_child_for_group ((CHILD), (ELT))	\
		 : (CHILD)->sibling)

/* Helper function for above macro.  Return next child in group.  */
static struct sra_elt *
next_child_for_group (struct sra_elt *child, struct sra_elt *group)
{
  gcc_assert (group->is_group);

  /* Find the next child in the parent.  */
  if (child)
    child = child->sibling;
  else
    child = group->parent->children;

  /* Skip siblings that do not belong to the group.  */
  while (child)
    {
      tree g_elt = group->element;
      if (TREE_CODE (g_elt) == RANGE_EXPR)
	{
	  if (!tree_int_cst_lt (child->element, TREE_OPERAND (g_elt, 0))
	      && !tree_int_cst_lt (TREE_OPERAND (g_elt, 1), child->element))
	    break;
	}
      else
	gcc_unreachable ();

      child = child->sibling;
    }

  return child;
}

/* Random access to the child of a parent is performed by hashing.
   This prevents quadratic behavior, and allows SRA to function
   reasonably on larger records.  */
static htab_t sra_map;

/* All structures are allocated out of the following obstack.  */
static struct obstack sra_obstack;

/* Debugging functions.  */
static void dump_sra_elt_name (FILE *, struct sra_elt *);
extern void debug_sra_elt_name (struct sra_elt *);

/* Forward declarations.  */
static tree generate_element_ref (struct sra_elt *);
static gimple_seq sra_build_assignment (tree dst, tree src);
static void mark_all_v_defs_seq (gimple_seq);

/* Return true if DECL is an SRA candidate.  */

static bool
is_sra_candidate_decl (tree decl)
{
  return DECL_P (decl) && bitmap_bit_p (sra_candidates, DECL_UID (decl));
}

/* Return true if TYPE is a scalar type.  */

static bool
is_sra_scalar_type (tree type)
{
  enum tree_code code = TREE_CODE (type);
  return (code == INTEGER_TYPE || code == REAL_TYPE || code == VECTOR_TYPE
	  || code == FIXED_POINT_TYPE
	  || code == ENUMERAL_TYPE || code == BOOLEAN_TYPE
	  || code == POINTER_TYPE || code == OFFSET_TYPE
	  || code == REFERENCE_TYPE);
}

/* Return true if TYPE can be decomposed into a set of independent variables.

   Note that this doesn't imply that all elements of TYPE can be
   instantiated, just that if we decide to break up the type into
   separate pieces that it can be done.  */

static bool
sra_type_can_be_decomposed_p (tree type)
{
  unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
  tree t;

  /* Avoid searching the same type twice.  */
  if (bitmap_bit_p (sra_type_decomp_cache, cache+0))
    return true;
  if (bitmap_bit_p (sra_type_decomp_cache, cache+1))
    return false;

  /* The type must have a definite nonzero size.  */
  if (TYPE_SIZE (type) == NULL || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
      || integer_zerop (TYPE_SIZE (type)))
    goto fail;

  /* The type must be a non-union aggregate.  */
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	bool saw_one_field = false;

	for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
	  if (TREE_CODE (t) == FIELD_DECL)
	    {
	      /* Reject incorrectly represented bit fields.  */
	      if (DECL_BIT_FIELD (t)
		  && INTEGRAL_TYPE_P (TREE_TYPE (t))
		  && (tree_low_cst (DECL_SIZE (t), 1)
		      != TYPE_PRECISION (TREE_TYPE (t))))
		goto fail;

	      saw_one_field = true;
	    }

	/* Record types must have at least one field.  */
	if (!saw_one_field)
	  goto fail;
      }
      break;

    case ARRAY_TYPE:
      /* Array types must have a fixed lower and upper bound.  */
      t = TYPE_DOMAIN (type);
      if (t == NULL)
	goto fail;
      if (TYPE_MIN_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MIN_VALUE (t)))
	goto fail;
      if (TYPE_MAX_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MAX_VALUE (t)))
	goto fail;
      break;

    case COMPLEX_TYPE:
      break;

    default:
      goto fail;
    }

  bitmap_set_bit (sra_type_decomp_cache, cache+0);
  return true;

 fail:
  bitmap_set_bit (sra_type_decomp_cache, cache+1);
  return false;
}

/* Returns true if TYPE is one of the available va_list types.
   Otherwise it returns false.
   Note that for multiple calling conventions there can be more
   than just one va_list type present.  */

static bool
is_va_list_type (tree type)
{
  tree h;

  if (type == NULL_TREE)
    return false;
  h = targetm.canonical_va_list_type (type);
  if (h == NULL_TREE)
    return false;
  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (h))
    return true;
  return false;
}

/* Return true if DECL can be decomposed into a set of independent
   (though not necessarily scalar) variables.  */

static bool
decl_can_be_decomposed_p (tree var)
{
  /* Early out for scalars.  */
  if (is_sra_scalar_type (TREE_TYPE (var)))
    return false;

  /* The variable must not be aliased.  */
  if (!is_gimple_non_addressable (var))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because it must live in memory\n");
	}
      return false;
    }

  /* The variable must not be volatile.  */
  if (TREE_THIS_VOLATILE (var))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because it is declared volatile\n");
	}
      return false;
    }

  /* We must be able to decompose the variable's type.  */
  if (!sra_type_can_be_decomposed_p (TREE_TYPE (var)))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because its type cannot be decomposed\n");
	}
      return false;
    }

  /* HACK: if we decompose a va_list_type_node before inlining, then we'll
     confuse tree-stdarg.c, and we won't be able to figure out which and
     how many arguments are accessed.  This really should be improved in
     tree-stdarg.c, as the decomposition is truly a win.  This could also
     be fixed if the stdarg pass ran early, but this can't be done until
     we have aliasing information early too.  See PR 30791.  */
  if (early_sra && is_va_list_type (TREE_TYPE (var)))
    return false;

  return true;
}

/* Return true if TYPE can be *completely* decomposed into scalars.  */

static bool
type_can_instantiate_all_elements (tree type)
{
  if (is_sra_scalar_type (type))
    return true;
  if (!sra_type_can_be_decomposed_p (type))
    return false;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
	tree f;

	if (bitmap_bit_p (sra_type_inst_cache, cache+0))
	  return true;
	if (bitmap_bit_p (sra_type_inst_cache, cache+1))
	  return false;

	for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      if (!type_can_instantiate_all_elements (TREE_TYPE (f)))
		{
		  bitmap_set_bit (sra_type_inst_cache, cache+1);
		  return false;
		}
	    }

	bitmap_set_bit (sra_type_inst_cache, cache+0);
	return true;
      }

    case ARRAY_TYPE:
      return type_can_instantiate_all_elements (TREE_TYPE (type));

    case COMPLEX_TYPE:
      return true;

    default:
      gcc_unreachable ();
    }
}

/* Test whether ELT or some sub-element cannot be scalarized.  */

static bool
can_completely_scalarize_p (struct sra_elt *elt)
{
  struct sra_elt *c;

  if (elt->cannot_scalarize)
    return false;

  for (c = elt->children; c; c = c->sibling)
    if (!can_completely_scalarize_p (c))
      return false;

  for (c = elt->groups; c; c = c->sibling)
    if (!can_completely_scalarize_p (c))
      return false;

  return true;
}

/* A simplified tree hashing algorithm that only handles the types of
   trees we expect to find in sra_elt->element.  */

static hashval_t
sra_hash_tree (tree t)
{
  hashval_t h;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      h = DECL_UID (t);
      break;

    case INTEGER_CST:
      h = TREE_INT_CST_LOW (t) ^ TREE_INT_CST_HIGH (t);
      break;

    case RANGE_EXPR:
      h = iterative_hash_expr (TREE_OPERAND (t, 0), 0);
      h = iterative_hash_expr (TREE_OPERAND (t, 1), h);
      break;

    case FIELD_DECL:
      /* We can have types that are compatible, but have different member
	 lists, so we can't hash fields by ID.  Use offsets instead.  */
      h = iterative_hash_expr (DECL_FIELD_OFFSET (t), 0);
      h = iterative_hash_expr (DECL_FIELD_BIT_OFFSET (t), h);
      break;

    case BIT_FIELD_REF:
      /* Don't take operand 0 into account, that's our parent.  */
      h = iterative_hash_expr (TREE_OPERAND (t, 1), 0);
      h = iterative_hash_expr (TREE_OPERAND (t, 2), h);
      break;

    default:
      gcc_unreachable ();
    }

  return h;
}

/* Hash function for type SRA_PAIR.  */

static hashval_t
sra_elt_hash (const void *x)
{
  const struct sra_elt *const e = (const struct sra_elt *) x;
  const struct sra_elt *p;
  hashval_t h;

  h = sra_hash_tree (e->element);

  /* Take into account everything except bitfield blocks back up the
     chain.  Given that chain lengths are rarely very long, this
     should be acceptable.  If we truly identify this as a performance
     problem, it should work to hash the pointer value
     "e->parent".  */
  for (p = e->parent; p ; p = p->parent)
    if (!p->in_bitfld_block)
      h = (h * 65521) ^ sra_hash_tree (p->element);

  return h;
}

/* Equality function for type SRA_PAIR.  */

static int
sra_elt_eq (const void *x, const void *y)
{
  const struct sra_elt *const a = (const struct sra_elt *) x;
  const struct sra_elt *const b = (const struct sra_elt *) y;
  tree ae, be;
  const struct sra_elt *ap = a->parent;
  const struct sra_elt *bp = b->parent;

  if (ap)
    while (ap->in_bitfld_block)
      ap = ap->parent;
  if (bp)
    while (bp->in_bitfld_block)
      bp = bp->parent;

  if (ap != bp)
    return false;

  ae = a->element;
  be = b->element;

  if (ae == be)
    return true;
  if (TREE_CODE (ae) != TREE_CODE (be))
    return false;

  switch (TREE_CODE (ae))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* These are all pointer unique.  */
      return false;

    case INTEGER_CST:
      /* Integers are not pointer unique, so compare their values.  */
      return tree_int_cst_equal (ae, be);

    case RANGE_EXPR:
      return
	tree_int_cst_equal (TREE_OPERAND (ae, 0), TREE_OPERAND (be, 0))
	&& tree_int_cst_equal (TREE_OPERAND (ae, 1), TREE_OPERAND (be, 1));

    case FIELD_DECL:
      /* Fields are unique within a record, but not between
	 compatible records.  */
      if (DECL_FIELD_CONTEXT (ae) == DECL_FIELD_CONTEXT (be))
	return false;
      return fields_compatible_p (ae, be);

    case BIT_FIELD_REF:
      return
	tree_int_cst_equal (TREE_OPERAND (ae, 1), TREE_OPERAND (be, 1))
	&& tree_int_cst_equal (TREE_OPERAND (ae, 2), TREE_OPERAND (be, 2));

    default:
      gcc_unreachable ();
    }
}

/* Create or return the SRA_ELT structure for CHILD in PARENT.  PARENT
   may be null, in which case CHILD must be a DECL.  */

static struct sra_elt *
lookup_element (struct sra_elt *parent, tree child, tree type,
		enum insert_option insert)
{
  struct sra_elt dummy;
  struct sra_elt **slot;
  struct sra_elt *elt;

  if (parent)
    dummy.parent = parent->is_group ? parent->parent : parent;
  else
    dummy.parent = NULL;
  dummy.element = child;

  slot = (struct sra_elt **) htab_find_slot (sra_map, &dummy, insert);
  if (!slot && insert == NO_INSERT)
    return NULL;

  elt = *slot;
  if (!elt && insert == INSERT)
    {
      *slot = elt = XOBNEW (&sra_obstack, struct sra_elt);
      memset (elt, 0, sizeof (*elt));

      elt->parent = parent;
      elt->element = child;
      elt->type = type;
      elt->is_scalar = is_sra_scalar_type (type);

      if (parent)
	{
	  if (IS_ELEMENT_FOR_GROUP (elt->element))
	    {
	      elt->is_group = true;
	      elt->sibling = parent->groups;
	      parent->groups = elt;
	    }
	  else
	    {
	      elt->sibling = parent->children;
	      parent->children = elt;
	    }
	}

      /* If this is a parameter, then if we want to scalarize, we have
	 one copy from the true function parameter.  Count it now.  */
      if (TREE_CODE (child) == PARM_DECL)
	{
	  elt->n_copies = 1;
	  bitmap_set_bit (needs_copy_in, DECL_UID (child));
	}
    }

  return elt;
}

/* Create or return the SRA_ELT structure for EXPR if the expression
   refers to a scalarizable variable.  */

static struct sra_elt *
maybe_lookup_element_for_expr (tree expr)
{
  struct sra_elt *elt;
  tree child;

  switch (TREE_CODE (expr))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      if (is_sra_candidate_decl (expr))
	return lookup_element (NULL, expr, TREE_TYPE (expr), INSERT);
      return NULL;

    case ARRAY_REF:
      /* We can't scalarize variable array indices.  */
      if (in_array_bounds_p (expr))
	child = TREE_OPERAND (expr, 1);
      else
	return NULL;
      break;

    case ARRAY_RANGE_REF:
      /* We can't scalarize variable array indices.  */
      if (range_in_array_bounds_p (expr))
	{
	  tree domain = TYPE_DOMAIN (TREE_TYPE (expr));
	  child = build2 (RANGE_EXPR, integer_type_node,
			  TYPE_MIN_VALUE (domain), TYPE_MAX_VALUE (domain));
	}
      else
	return NULL;
      break;

    case COMPONENT_REF:
      {
	tree type = TREE_TYPE (TREE_OPERAND (expr, 0));
	/* Don't look through unions.  */
	if (TREE_CODE (type) != RECORD_TYPE)
	  return NULL;
	/* Nor through variable-sized records.  */
	if (TYPE_SIZE (type) == NULL_TREE
	    || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	  return NULL;
	child = TREE_OPERAND (expr, 1);
      }
      break;

    case REALPART_EXPR:
      child = integer_zero_node;
      break;
    case IMAGPART_EXPR:
      child = integer_one_node;
      break;

    default:
      return NULL;
    }

  elt = maybe_lookup_element_for_expr (TREE_OPERAND (expr, 0));
  if (elt)
    return lookup_element (elt, child, TREE_TYPE (expr), INSERT);

  return NULL;
}
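
/* E.g., for the expression "a.b.c" this recurses through "a.b" and "a":
   each level creates (or finds) its element, so the element for "c"
   hangs off the one for "b", which hangs off the root element for
   "a".  */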

/* Functions to walk just enough of the tree to see all scalarizable
   references, and categorize them.  */

/* A set of callbacks for phases 2 and 4.  They'll be invoked for the
   various kinds of references seen.  In all cases, *GSI is an iterator
   pointing to the statement being processed.  */
struct sra_walk_fns
{
  /* Invoked when ELT is required as a unit.  Note that ELT might refer to
     a leaf node, in which case this is a simple scalar reference.  *EXPR_P
     points to the location of the expression.  IS_OUTPUT is true if this
     is a left-hand-side reference.  USE_ALL is true if we saw something we
     couldn't quite identify and had to force the use of the entire object.  */
  void (*use) (struct sra_elt *elt, tree *expr_p,
	       gimple_stmt_iterator *gsi, bool is_output, bool use_all);

  /* Invoked when we have a copy between two scalarizable references.  */
  void (*copy) (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
		gimple_stmt_iterator *gsi);

  /* Invoked when ELT is initialized from a constant.  VALUE may be NULL,
     in which case it should be treated as an empty CONSTRUCTOR.  */
  void (*init) (struct sra_elt *elt, tree value, gimple_stmt_iterator *gsi);

  /* Invoked when we have a copy between one scalarizable reference ELT
     and one non-scalarizable reference OTHER without side-effects.
     IS_OUTPUT is true if ELT is on the left-hand side.  */
  void (*ldst) (struct sra_elt *elt, tree other,
		gimple_stmt_iterator *gsi, bool is_output);

  /* True during phase 2, false during phase 4.  */
  /* ??? This is a hack.  */
  bool initial_scan;
};

#ifdef ENABLE_CHECKING
/* Invoked via walk_tree, if *TP contains a candidate decl, return it.  */

static tree
sra_find_candidate_decl (tree *tp, int *walk_subtrees,
			 void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
    {
      *walk_subtrees = 0;
      if (is_sra_candidate_decl (t))
	return t;
    }

  return NULL;
}
#endif

/* Walk most expressions looking for a scalarizable aggregate.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_expr (tree *expr_p, gimple_stmt_iterator *gsi, bool is_output,
	       const struct sra_walk_fns *fns)
{
  tree expr = *expr_p;
  tree inner = expr;
  bool disable_scalarization = false;
  bool use_all_p = false;

  /* We're looking to collect a reference expression between EXPR and INNER,
     such that INNER is a scalarizable decl and all other nodes through EXPR
     are references that we can scalarize.  If we come across something that
     we can't scalarize, we reset EXPR.  This has the effect of making it
     appear that we're referring to the larger expression as a whole.  */
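
  /* For example (an illustration of the behavior coded below, not an
     additional rule): a BIT_FIELD_REF that reads several fields of "x.a"
     at once is not scalarized itself; EXPR is reset to "x.a" and the
     walk eventually records a use of that whole sub-aggregate with
     USE_ALL set.  */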

  while (1)
    switch (TREE_CODE (inner))
      {
      case VAR_DECL:
      case PARM_DECL:
      case RESULT_DECL:
	/* If there is a scalarizable decl at the bottom, then process it.  */
	if (is_sra_candidate_decl (inner))
	  {
	    struct sra_elt *elt = maybe_lookup_element_for_expr (expr);
	    if (disable_scalarization)
	      elt->cannot_scalarize = true;
	    else
	      fns->use (elt, expr_p, gsi, is_output, use_all_p);
	  }
	return;

      case ARRAY_REF:
	/* Non-constant index means any member may be accessed.  Prevent the
	   expression from being scalarized.  If we were to treat this as a
	   reference to the whole array, we can wind up with a single dynamic
	   index reference inside a loop being overridden by several constant
	   index references during loop setup.  It's possible that this could
	   be avoided by using dynamic usage counts based on BB trip counts
	   (based on loop analysis or profiling), but that hardly seems worth
	   the effort.  */
	/* ??? Hack.  Figure out how to push this into the scan routines
	   without duplicating too much code.  */
	if (!in_array_bounds_p (inner))
	  {
	    disable_scalarization = true;
	    goto use_all;
	  }
	/* ??? Are we assured that non-constant bounds and stride will have
	   the same value everywhere?  I don't think Fortran will...  */
	if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
	  goto use_all;
	inner = TREE_OPERAND (inner, 0);
	break;

      case ARRAY_RANGE_REF:
	if (!range_in_array_bounds_p (inner))
	  {
	    disable_scalarization = true;
	    goto use_all;
	  }
	/* ??? See above regarding non-constant bounds and stride.  */
	if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
	  goto use_all;
	inner = TREE_OPERAND (inner, 0);
	break;

      case COMPONENT_REF:
	{
	  tree type = TREE_TYPE (TREE_OPERAND (inner, 0));
	  /* Don't look through unions.  */
	  if (TREE_CODE (type) != RECORD_TYPE)
	    goto use_all;
	  /* Nor through variable-sized records.  */
	  if (TYPE_SIZE (type) == NULL_TREE
	      || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	    goto use_all;
	  inner = TREE_OPERAND (inner, 0);
	}
	break;

      case REALPART_EXPR:
      case IMAGPART_EXPR:
	inner = TREE_OPERAND (inner, 0);
	break;

      case BIT_FIELD_REF:
	/* A bit field reference to a specific vector is scalarized, but one
	   used as an output needs to be noted as a use on the left hand side
	   so that, when we scalarize the vector, we can mark that variable
	   as not renamable.  */
	if (is_output
	    && TREE_CODE (TREE_TYPE (TREE_OPERAND (inner, 0))) == VECTOR_TYPE)
	  {
	    struct sra_elt *elt
	      = maybe_lookup_element_for_expr (TREE_OPERAND (inner, 0));
	    if (elt)
	      elt->is_vector_lhs = true;
	  }

	/* A bit field reference (access to *multiple* fields simultaneously)
	   is not currently scalarized.  Consider this an access to the full
	   outer element, to which walk_tree will bring us next.  */
	goto use_all;

      CASE_CONVERT:
	/* Similarly, a nop explicitly wants to look at an object in a
	   type other than the one we've scalarized.  */
	goto use_all;

      case VIEW_CONVERT_EXPR:
	/* Likewise for a view conversion, but with an additional twist:
	   it can be on the LHS and, in this case, an access to the full
	   outer element would mean a killing def.  So we need to punt
	   if we haven't already a full access to the current element,
	   because we cannot pretend to have a killing def if we only
	   have a partial access at some level.  */
	if (is_output && !use_all_p && inner != expr)
	  disable_scalarization = true;
	goto use_all;

      case WITH_SIZE_EXPR:
	/* This is a transparent wrapper.  The entire inner expression really
	   is being used.  */
	goto use_all;

      use_all:
	expr_p = &TREE_OPERAND (inner, 0);
	inner = expr = *expr_p;
	use_all_p = true;
	break;

      default:
#ifdef ENABLE_CHECKING
	/* Validate that we're not missing any references.  */
	gcc_assert (!walk_tree (&inner, sra_find_candidate_decl, NULL, NULL));
#endif
	return;
      }
}

/* Walk the arguments of a GIMPLE_CALL looking for scalarizable aggregates.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_gimple_call (gimple stmt, gimple_stmt_iterator *gsi,
		      const struct sra_walk_fns *fns)
{
  int i;
  int nargs = gimple_call_num_args (stmt);

  for (i = 0; i < nargs; i++)
    sra_walk_expr (gimple_call_arg_ptr (stmt, i), gsi, false, fns);

  if (gimple_call_lhs (stmt))
    sra_walk_expr (gimple_call_lhs_ptr (stmt), gsi, true, fns);
}

/* Walk the inputs and outputs of a GIMPLE_ASM looking for scalarizable
   aggregates.  If we find one, invoke FNS->USE.  */

static void
sra_walk_gimple_asm (gimple stmt, gimple_stmt_iterator *gsi,
		     const struct sra_walk_fns *fns)
{
  size_t i;
  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    sra_walk_expr (&TREE_VALUE (gimple_asm_input_op (stmt, i)), gsi, false, fns);
  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
    sra_walk_expr (&TREE_VALUE (gimple_asm_output_op (stmt, i)), gsi, true, fns);
}

/* Walk a GIMPLE_ASSIGN and categorize the assignment appropriately.  */

static void
sra_walk_gimple_assign (gimple stmt, gimple_stmt_iterator *gsi,
			const struct sra_walk_fns *fns)
{
  struct sra_elt *lhs_elt = NULL, *rhs_elt = NULL;
  tree lhs, rhs;

  /* If there is more than 1 element on the RHS, only walk the lhs.  */
  if (!gimple_assign_single_p (stmt))
    {
      sra_walk_expr (gimple_assign_lhs_ptr (stmt), gsi, true, fns);
      return;
    }

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  lhs_elt = maybe_lookup_element_for_expr (lhs);
  rhs_elt = maybe_lookup_element_for_expr (rhs);

  /* If both sides are scalarizable, this is a COPY operation.  */
  if (lhs_elt && rhs_elt)
    {
      fns->copy (lhs_elt, rhs_elt, gsi);
      return;
    }

  /* If the RHS is scalarizable, handle it.  There are only two cases.  */
  if (rhs_elt)
    {
      if (!rhs_elt->is_scalar && !TREE_SIDE_EFFECTS (lhs))
	fns->ldst (rhs_elt, lhs, gsi, false);
      else
	fns->use (rhs_elt, gimple_assign_rhs1_ptr (stmt), gsi, false, false);
    }

  /* If it isn't scalarizable, there may be scalarizable variables within, so
     check for a call or else walk the RHS to see if we need to do any
     copy-in operations.  We need to do it before the LHS is scalarized so
     that the statements get inserted in the proper place, before any
     copy-out operations.  */
  else
    sra_walk_expr (gimple_assign_rhs1_ptr (stmt), gsi, false, fns);

  /* Likewise, handle the LHS being scalarizable.  We have cases similar
     to those above, but also want to handle RHS being constant.  */
  if (lhs_elt)
    {
      /* If this is an assignment from a constant, or constructor, then
	 we have access to all of the elements individually.  Invoke INIT.  */
      if (TREE_CODE (rhs) == COMPLEX_EXPR
	  || TREE_CODE (rhs) == COMPLEX_CST
	  || TREE_CODE (rhs) == CONSTRUCTOR)
	fns->init (lhs_elt, rhs, gsi);

      /* If this is an assignment from read-only memory, treat this as if
	 we'd been passed the constructor directly.  Invoke INIT.  */
      else if (TREE_CODE (rhs) == VAR_DECL
	       && TREE_STATIC (rhs)
	       && !DECL_EXTERNAL (rhs)
	       && TREE_READONLY (rhs)
	       && targetm.binds_local_p (rhs))
	fns->init (lhs_elt, DECL_INITIAL (rhs), gsi);

      /* If this is a copy from a non-scalarizable lvalue, invoke LDST.
	 The lvalue requirement prevents us from trying to directly scalarize
	 the result of a function call.  Which would result in trying to call
	 the function multiple times, and other evil things.  */
      else if (!lhs_elt->is_scalar
	       && !TREE_SIDE_EFFECTS (rhs) && is_gimple_addressable (rhs))
	fns->ldst (lhs_elt, rhs, gsi, true);

      /* Otherwise we're being used in some context that requires the
	 aggregate to be seen as a whole.  Invoke USE.  */
      else
	fns->use (lhs_elt, gimple_assign_lhs_ptr (stmt), gsi, true, false);
    }

  /* Similarly to above, LHS_ELT being null only means that the LHS as a
     whole is not a scalarizable reference.  There may be occurrences of
     scalarizable variables within, which implies a USE.  */
  else
    sra_walk_expr (gimple_assign_lhs_ptr (stmt), gsi, true, fns);
}

/* Entry point to the walk functions.  Search the entire function,
   invoking the callbacks in FNS on each of the references to
   scalarizable variables.  */

static void
sra_walk_function (const struct sra_walk_fns *fns)
{
  basic_block bb;
  gimple_stmt_iterator si, ni;

  /* ??? Phase 4 could derive some benefit from walking the function in
     dominator tree order.  */

  FOR_EACH_BB (bb)
    for (si = gsi_start_bb (bb); !gsi_end_p (si); si = ni)
      {
	gimple stmt;

	stmt = gsi_stmt (si);
	ni = si;
	gsi_next (&ni);

	/* If the statement does not reference memory, then it doesn't
	   make any structure references that we care about.  */
	if (!gimple_references_memory_p (stmt))
	  continue;

	switch (gimple_code (stmt))
	  {
	  case GIMPLE_RETURN:
	    /* If we have "return <retval>" then the return value is
	       already exposed for our pleasure.  Walk it as a USE to
	       force all the components back in place for the return.  */
	    if (gimple_return_retval (stmt) == NULL_TREE)
	      ;
	    else
	      sra_walk_expr (gimple_return_retval_ptr (stmt), &si, false,
			     fns);
	    break;

	  case GIMPLE_ASSIGN:
	    sra_walk_gimple_assign (stmt, &si, fns);
	    break;
	  case GIMPLE_CALL:
	    sra_walk_gimple_call (stmt, &si, fns);
	    break;
	  case GIMPLE_ASM:
	    sra_walk_gimple_asm (stmt, &si, fns);
	    break;

	  default:
	    break;
	  }
      }
}

/* Phase One: Scan all referenced variables in the program looking for
   structures that could be decomposed.  */

static bool
find_candidates_for_sra (void)
{
  bool any_set = false;
  tree var;
  referenced_var_iterator rvi;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (decl_can_be_decomposed_p (var))
	{
	  bitmap_set_bit (sra_candidates, DECL_UID (var));
	  any_set = true;
	}
    }

  return any_set;
}

/* Phase Two: Scan all references to scalarizable variables.  Count the
   number of times they are used or copied respectively.  */

/* Callbacks to fill in SRA_WALK_FNS.  Everything but USE is
   considered a copy, because we can decompose the reference such that
   the sub-elements needn't be contiguous.  */
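
/* For example, for "a = b" where both sides are scalarizable, scan_copy
   below bumps n_copies on both elements; for "a = f ()" only the LHS is
   walked and scan_use bumps n_uses on "a".  */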

static void
scan_use (struct sra_elt *elt, tree *expr_p ATTRIBUTE_UNUSED,
	  gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED,
	  bool is_output ATTRIBUTE_UNUSED, bool use_all ATTRIBUTE_UNUSED)
{
  elt->n_uses += 1;
}

static void
scan_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
	   gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED)
{
  lhs_elt->n_copies += 1;
  rhs_elt->n_copies += 1;
}

static void
scan_init (struct sra_elt *lhs_elt, tree rhs ATTRIBUTE_UNUSED,
	   gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED)
{
  lhs_elt->n_copies += 1;
}

static void
scan_ldst (struct sra_elt *elt, tree other ATTRIBUTE_UNUSED,
	   gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED,
	   bool is_output ATTRIBUTE_UNUSED)
{
  elt->n_copies += 1;
}

/* Dump the values we collected during the scanning phase.  */

static void
scan_dump (struct sra_elt *elt)
{
  struct sra_elt *c;

  dump_sra_elt_name (dump_file, elt);
  fprintf (dump_file, ": n_uses=%u n_copies=%u\n", elt->n_uses, elt->n_copies);

  for (c = elt->children; c ; c = c->sibling)
    scan_dump (c);

  for (c = elt->groups; c ; c = c->sibling)
    scan_dump (c);
}

/* Entry point to phase 2.  Scan the entire function, building up
   scalarization data structures, recording copies and uses.  */

static void
scan_function (void)
{
  static const struct sra_walk_fns fns = {
    scan_use, scan_copy, scan_init, scan_ldst, true
  };
  bitmap_iterator bi;

  sra_walk_function (&fns);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      unsigned i;

      fputs ("\nScan results:\n", dump_file);
      EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
	{
	  tree var = referenced_var (i);
	  struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
	  if (elt)
	    scan_dump (elt);
	}
      fputc ('\n', dump_file);
    }
}

/* Phase Three: Make decisions about which variables to scalarize, if any.
   All elements to be scalarized have replacement variables made for them.  */

/* A subroutine of build_element_name.  Recursively build the element
   name on the obstack.  */

static void
build_element_name_1 (struct sra_elt *elt)
{
  tree t;
  char buffer[32];

  if (elt->parent)
    {
      build_element_name_1 (elt->parent);
      obstack_1grow (&sra_obstack, '$');

      if (TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
	{
	  if (elt->element == integer_zero_node)
	    obstack_grow (&sra_obstack, "real", 4);
	  else
	    obstack_grow (&sra_obstack, "imag", 4);
	  return;
	}
    }

  t = elt->element;
  if (TREE_CODE (t) == INTEGER_CST)
    {
      /* ??? Eh.  Don't bother doing double-wide printing.  */
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (t));
      obstack_grow (&sra_obstack, buffer, strlen (buffer));
    }
  else if (TREE_CODE (t) == BIT_FIELD_REF)
    {
      sprintf (buffer, "B" HOST_WIDE_INT_PRINT_DEC,
	       tree_low_cst (TREE_OPERAND (t, 2), 1));
      obstack_grow (&sra_obstack, buffer, strlen (buffer));
      sprintf (buffer, "F" HOST_WIDE_INT_PRINT_DEC,
	       tree_low_cst (TREE_OPERAND (t, 1), 1));
      obstack_grow (&sra_obstack, buffer, strlen (buffer));
    }
  else
    {
      tree name = DECL_NAME (t);
      if (name)
	obstack_grow (&sra_obstack, IDENTIFIER_POINTER (name),
		      IDENTIFIER_LENGTH (name));
      else
	{
	  sprintf (buffer, "D%u", DECL_UID (t));
	  obstack_grow (&sra_obstack, buffer, strlen (buffer));
	}
    }
}

/* Construct a pretty variable name for an element's replacement variable.
   The name is built on the obstack.  */

static char *
build_element_name (struct sra_elt *elt)
{
  build_element_name_1 (elt);
  obstack_1grow (&sra_obstack, '\0');
  return XOBFINISH (&sra_obstack, char *);
}
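
/* E.g., the replacement for the real part of a complex field "c" of a
   member "b" of a root variable "a" would be named "a$b$c$real" (a
   sketch; the exact text depends on the DECL_NAMEs and UIDs
   involved).  */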

/* Instantiate an element as an independent variable.  */

static void
instantiate_element (struct sra_elt *elt)
{
  struct sra_elt *base_elt;
  tree var, base;
  bool nowarn = TREE_NO_WARNING (elt->element);

  for (base_elt = elt; base_elt->parent; base_elt = base_elt->parent)
    if (!nowarn)
      nowarn = TREE_NO_WARNING (base_elt->parent->element);
  base = base_elt->element;

  elt->replacement = var = make_rename_temp (TYPE_MAIN_VARIANT (elt->type),
					     "SR");

  if (DECL_P (elt->element)
      && !tree_int_cst_equal (DECL_SIZE (var), DECL_SIZE (elt->element)))
    {
      DECL_SIZE (var) = DECL_SIZE (elt->element);
      DECL_SIZE_UNIT (var) = DECL_SIZE_UNIT (elt->element);

      elt->in_bitfld_block = 1;
      elt->replacement = fold_build3 (BIT_FIELD_REF,
				      TYPE_MAIN_VARIANT (elt->type), var,
				      DECL_SIZE (var),
				      BYTES_BIG_ENDIAN
				      ? size_binop (MINUS_EXPR,
						    TYPE_SIZE (elt->type),
						    DECL_SIZE (var))
				      : bitsize_int (0));
    }

  /* For vectors, if used on the left hand side with BIT_FIELD_REF,
     they are not a gimple register.  */
  if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE && elt->is_vector_lhs)
    DECL_GIMPLE_REG_P (var) = 0;

  DECL_SOURCE_LOCATION (var) = DECL_SOURCE_LOCATION (base);
  DECL_ARTIFICIAL (var) = 1;

  if (DECL_NAME (base) && !DECL_IGNORED_P (base))
    {
      char *pretty_name = build_element_name (elt);
      DECL_NAME (var) = get_identifier (pretty_name);
      obstack_free (&sra_obstack, pretty_name);

      SET_DECL_DEBUG_EXPR (var, generate_element_ref (elt));
      DECL_DEBUG_EXPR_IS_FROM (var) = 1;

      DECL_IGNORED_P (var) = 0;
      TREE_NO_WARNING (var) = nowarn;
    }
  else
    {
      DECL_IGNORED_P (var) = 1;
      /* ??? We can't generate any warning that would be meaningful.  */
      TREE_NO_WARNING (var) = 1;
    }

  /* Zero-initialize bit-field scalarization variables, to avoid
     triggering undefined behavior.  */
  if (TREE_CODE (elt->element) == BIT_FIELD_REF
      || (var != elt->replacement
	  && TREE_CODE (elt->replacement) == BIT_FIELD_REF))
    {
      gimple_seq init = sra_build_assignment (var,
					      fold_convert (TREE_TYPE (var),
							    integer_zero_node));
      insert_edge_copies_seq (init, ENTRY_BLOCK_PTR);
      mark_all_v_defs_seq (init);
    }

  if (dump_file)
    {
      fputs ("  ", dump_file);
      dump_sra_elt_name (dump_file, elt);
      fputs (" -> ", dump_file);
      print_generic_expr (dump_file, var, dump_flags);
      fputc ('\n', dump_file);
    }
}

/* Make one pass across an element tree deciding whether or not it's
   profitable to instantiate individual leaf scalars.

   PARENT_USES and PARENT_COPIES are the sum of the N_USES and N_COPIES
   fields all the way up the tree.  */

static void
decide_instantiation_1 (struct sra_elt *elt, unsigned int parent_uses,
			unsigned int parent_copies)
{
  if (dump_file && !elt->parent)
    {
      fputs ("Initial instantiation for ", dump_file);
      dump_sra_elt_name (dump_file, elt);
      fputc ('\n', dump_file);
    }

  if (elt->cannot_scalarize)
    return;

  if (elt->is_scalar)
    {
      /* The decision is simple: instantiate if we're used more frequently
	 than the parent needs to be seen as a complete unit.  */
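      /* As a worked example (numbers invented for illustration): if this
	 element has n_uses == 3 and n_copies == 1, and its parents
	 accumulate parent_uses == 2 and parent_copies == 1, then
	 3 + 1 + 1 > 2 and the element gets its own replacement
	 variable.  */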
      if (elt->n_uses + elt->n_copies + parent_copies > parent_uses)
	instantiate_element (elt);
    }
  else
    {
      struct sra_elt *c, *group;
      unsigned int this_uses = elt->n_uses + parent_uses;
      unsigned int this_copies = elt->n_copies + parent_copies;

      /* Consider groups of sub-elements as weighing in favour of
	 instantiation whatever their size.  */
      for (group = elt->groups; group ; group = group->sibling)
	FOR_EACH_ACTUAL_CHILD (c, group)
	  {
	    c->n_uses += group->n_uses;
	    c->n_copies += group->n_copies;
	  }

      for (c = elt->children; c ; c = c->sibling)
	decide_instantiation_1 (c, this_uses, this_copies);
    }
}

/* Compute the size and number of all instantiated elements below ELT.
   We will only care about this if the size of the complete structure
   fits in a HOST_WIDE_INT, so we don't have to worry about overflow.  */

static unsigned int
sum_instantiated_sizes (struct sra_elt *elt, unsigned HOST_WIDE_INT *sizep)
{
  if (elt->replacement)
    {
      *sizep += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (elt->type));
      return 1;
    }
  else
    {
      struct sra_elt *c;
      unsigned int count = 0;

      for (c = elt->children; c ; c = c->sibling)
	count += sum_instantiated_sizes (c, sizep);

      return count;
    }
}

/* Instantiate fields in ELT->TYPE that are not currently present as
   children of ELT.  */

static void instantiate_missing_elements (struct sra_elt *elt);

static struct sra_elt *
instantiate_missing_elements_1 (struct sra_elt *elt, tree child, tree type)
{
  struct sra_elt *sub = lookup_element (elt, child, type, INSERT);
  if (sub->is_scalar)
    {
      if (sub->replacement == NULL)
	instantiate_element (sub);
    }
  else
    instantiate_missing_elements (sub);
  return sub;
}

/* Obtain the canonical type for field F of ELEMENT.  */

static tree
canon_type_for_field (tree f, tree element)
{
  tree field_type = TREE_TYPE (f);

  /* canonicalize_component_ref() unwidens some bit-field types (not
     marked as DECL_BIT_FIELD in C++), so we must do the same, lest we
     may introduce type mismatches.  */
  if (INTEGRAL_TYPE_P (field_type)
      && DECL_MODE (f) != TYPE_MODE (field_type))
    field_type = TREE_TYPE (get_unwidened (build3 (COMPONENT_REF,
						   field_type,
						   element,
						   f, NULL_TREE),
					   NULL_TREE));

  return field_type;
}

/* Look for adjacent fields of ELT starting at F that we'd like to
   scalarize as a single variable.  Return the last field of the
   group.  */

static tree
try_instantiate_multiple_fields (struct sra_elt *elt, tree f)
{
  int count;
  unsigned HOST_WIDE_INT align, bit, size, alchk;
  enum machine_mode mode;
  tree first = f, prev;
  tree type, var;
  struct sra_elt *block;

  /* Pointer fields are typically best handled as standalone entities.  */
  if (POINTER_TYPE_P (TREE_TYPE (f)))
    return f;

  if (!is_sra_scalar_type (TREE_TYPE (f))
      || !host_integerp (DECL_FIELD_OFFSET (f), 1)
      || !host_integerp (DECL_FIELD_BIT_OFFSET (f), 1)
      || !host_integerp (DECL_SIZE (f), 1)
      || lookup_element (elt, f, NULL, NO_INSERT))
    return f;

  block = elt;

  /* For complex and array objects, there are going to be integer
     literals as child elements.  In this case, we can't just take the
     alignment and mode of the decl, so we instead rely on the element
     type.

     ??? We could try to infer additional alignment from the full
     object declaration and the location of the sub-elements we're
     accessing.  */
  for (count = 0; !DECL_P (block->element); count++)
    block = block->parent;

  align = DECL_ALIGN (block->element);
  alchk = GET_MODE_BITSIZE (DECL_MODE (block->element));

  if (count)
    {
      type = TREE_TYPE (block->element);
      while (count--)
	type = TREE_TYPE (type);

      align = TYPE_ALIGN (type);
      alchk = GET_MODE_BITSIZE (TYPE_MODE (type));
    }

  if (align < alchk)
    align = alchk;

  /* Coalescing wider fields is probably pointless and
     inefficient.  */
  if (align > BITS_PER_WORD)
    align = BITS_PER_WORD;

  bit = tree_low_cst (DECL_FIELD_OFFSET (f), 1) * BITS_PER_UNIT
    + tree_low_cst (DECL_FIELD_BIT_OFFSET (f), 1);
  size = tree_low_cst (DECL_SIZE (f), 1);

  alchk = align - 1;
  alchk = ~alchk;

  if ((bit & alchk) != ((bit + size - 1) & alchk))
    return f;

  /* Find adjacent fields in the same alignment word.  */

  for (prev = f, f = TREE_CHAIN (f);
       f && TREE_CODE (f) == FIELD_DECL
	 && is_sra_scalar_type (TREE_TYPE (f))
	 && host_integerp (DECL_FIELD_OFFSET (f), 1)
	 && host_integerp (DECL_FIELD_BIT_OFFSET (f), 1)
	 && host_integerp (DECL_SIZE (f), 1)
	 && !lookup_element (elt, f, NULL, NO_INSERT);
       prev = f, f = TREE_CHAIN (f))
    {
      unsigned HOST_WIDE_INT nbit, nsize;

      nbit = tree_low_cst (DECL_FIELD_OFFSET (f), 1) * BITS_PER_UNIT
	+ tree_low_cst (DECL_FIELD_BIT_OFFSET (f), 1);
      nsize = tree_low_cst (DECL_SIZE (f), 1);

      if (bit + size == nbit)
	{
	  if ((bit & alchk) != ((nbit + nsize - 1) & alchk))
	    {
	      /* If we're at an alignment boundary, don't bother
		 growing alignment such that we can include this next
		 field.  */
	      if ((nbit & alchk)
		  || GET_MODE_BITSIZE (DECL_MODE (f)) <= align)
		break;

	      align = GET_MODE_BITSIZE (DECL_MODE (f));
	      alchk = align - 1;
	      alchk = ~alchk;

	      if ((bit & alchk) != ((nbit + nsize - 1) & alchk))
		break;
	    }

	  size += nsize;
	}
      else if (nbit + nsize == bit)
	{
	  if ((nbit & alchk) != ((bit + size - 1) & alchk))
	    {
	      if ((bit & alchk)
		  || GET_MODE_BITSIZE (DECL_MODE (f)) <= align)
		break;

	      align = GET_MODE_BITSIZE (DECL_MODE (f));
	      alchk = align - 1;
	      alchk = ~alchk;

	      if ((nbit & alchk) != ((bit + size - 1) & alchk))
		break;
	    }

	  bit = nbit;
	  size += nsize;
	}
      else
	break;
    }

  f = prev;

  if (f == first)
    return f;

  gcc_assert ((bit & alchk) == ((bit + size - 1) & alchk));

  /* Try to widen the bit range so as to cover padding bits as well.  */

  if ((bit & ~alchk) || size != align)
    {
      unsigned HOST_WIDE_INT mbit = bit & alchk;
      unsigned HOST_WIDE_INT msize = align;

      for (f = TYPE_FIELDS (elt->type);
	   f; f = TREE_CHAIN (f))
	{
	  unsigned HOST_WIDE_INT fbit, fsize;

	  /* Skip the fields from first to prev.  */
	  if (f == first)
	    {
	      f = prev;
	      continue;
	    }

	  if (!(TREE_CODE (f) == FIELD_DECL
		&& host_integerp (DECL_FIELD_OFFSET (f), 1)
		&& host_integerp (DECL_FIELD_BIT_OFFSET (f), 1)))
	    continue;

	  fbit = tree_low_cst (DECL_FIELD_OFFSET (f), 1) * BITS_PER_UNIT
	    + tree_low_cst (DECL_FIELD_BIT_OFFSET (f), 1);

	  /* If we're past the selected word, we're fine.  */
	  if ((bit & alchk) < (fbit & alchk))
	    continue;

	  if (host_integerp (DECL_SIZE (f), 1))
	    fsize = tree_low_cst (DECL_SIZE (f), 1);
	  else
	    /* Assume a variable-sized field takes up all space till
	       the end of the word.  ??? Endianness issues?  */
	    fsize = align - (fbit & alchk);

	  if ((fbit & alchk) < (bit & alchk))
	    {
	      /* A large field might start at a previous word and
		 extend into the selected word.  Exclude those
		 bits.  ??? Endianness issues?  */
	      HOST_WIDE_INT diff = fbit + fsize - mbit;

	      if (diff <= 0)
		continue;

	      mbit += diff;
	      msize -= diff;
	    }
	  else
	    {
	      /* Non-overlapping, great.  */
	      if (fbit + fsize <= mbit
		  || mbit + msize <= fbit)
		continue;

	      if (fbit <= mbit)
		{
		  unsigned HOST_WIDE_INT diff = fbit + fsize - mbit;
		  mbit += diff;
		  msize -= diff;
		}
	      else if (fbit > mbit)
		msize -= (mbit + msize - fbit);
	      else
		gcc_unreachable ();
	    }
	}

      bit = mbit;
      size = msize;
    }

  /* Now we know the bit range we're interested in.  Find the smallest
     machine mode we can use to access it.  */

  for (mode = smallest_mode_for_size (size, MODE_INT);
       ;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      gcc_assert (mode != VOIDmode);

      alchk = GET_MODE_PRECISION (mode) - 1;
      alchk = ~alchk;

      if ((bit & alchk) == ((bit + size - 1) & alchk))
	break;
    }

  gcc_assert (~alchk < align);

  /* Create the field group as a single variable.  */

  /* We used to create a type for the mode above, but the size turns
     out not to be the mode's size.  As we need a matching type
     to build a BIT_FIELD_REF, use a nonstandard integer type as
     fallback.  */
  type = lang_hooks.types.type_for_size (size, 1);
  if (!type || TYPE_PRECISION (type) != size)
    type = build_nonstandard_integer_type (size, 1);
  var = build3 (BIT_FIELD_REF, type, NULL_TREE,
		bitsize_int (size), bitsize_int (bit));

  block = instantiate_missing_elements_1 (elt, var, type);
  gcc_assert (block && block->is_scalar);

  var = block->replacement;
  block->in_bitfld_block = 2;

  /* Add the member fields to the group, such that they access
     portions of the group variable.  */

  for (f = first; f != TREE_CHAIN (prev); f = TREE_CHAIN (f))
    {
      tree field_type = canon_type_for_field (f, elt->element);
      struct sra_elt *fld = lookup_element (block, f, field_type, INSERT);

      gcc_assert (fld && fld->is_scalar && !fld->replacement);

      fld->replacement = fold_build3 (BIT_FIELD_REF, field_type, var,
				      bitsize_int (TYPE_PRECISION (field_type)),
				      bitsize_int
				      ((TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f))
					* BITS_PER_UNIT
					+ (TREE_INT_CST_LOW
					   (DECL_FIELD_BIT_OFFSET (f)))
					- (TREE_INT_CST_LOW
					   (TREE_OPERAND (block->element, 2))))));
      fld->in_bitfld_block = 1;
    }

  return prev;
}

static void
instantiate_missing_elements (struct sra_elt *elt)
{
  tree type = elt->type;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	tree f;
	for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      tree last = try_instantiate_multiple_fields (elt, f);

	      if (last != f)
		{
		  f = last;
		  continue;
		}

	      instantiate_missing_elements_1 (elt, f,
					      canon_type_for_field
					      (f, elt->element));
	    }
	break;
      }

    case ARRAY_TYPE:
      {
	tree i, max, subtype;

	i = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	max = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	subtype = TREE_TYPE (type);

	while (1)
	  {
	    instantiate_missing_elements_1 (elt, i, subtype);
	    if (tree_int_cst_equal (i, max))
	      break;
	    i = int_const_binop (PLUS_EXPR, i, integer_one_node, true);
	  }

	break;
      }

    case COMPLEX_TYPE:
      type = TREE_TYPE (type);
      instantiate_missing_elements_1 (elt, integer_zero_node, type);
      instantiate_missing_elements_1 (elt, integer_one_node, type);
      break;

    default:
      gcc_unreachable ();
    }
}

/* Return true if there is only one non-aggregate field in the record, TYPE.
   Return false otherwise.  */

static bool
single_scalar_field_in_record_p (tree type)
{
  int num_fields = 0;
  tree field;
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      {
	num_fields++;

	if (num_fields == 2)
	  return false;

	if (AGGREGATE_TYPE_P (TREE_TYPE (field)))
	  return false;
      }

  return true;
}

/* Make one pass across an element tree deciding whether to perform block
   or element copies.  If we decide on element copies, instantiate all
   elements.  Return true if there are any instantiated sub-elements.  */

static bool
decide_block_copy (struct sra_elt *elt)
{
  struct sra_elt *c;
  bool any_inst;

  /* We shouldn't be invoked on groups of sub-elements as they must
     behave like their parent as far as block copy is concerned.  */
  gcc_assert (!elt->is_group);

  /* If scalarization is disabled, respect it.  */
  if (elt->cannot_scalarize)
    {
      elt->use_block_copy = 1;

      if (dump_file)
	{
	  fputs ("Scalarization disabled for ", dump_file);
	  dump_sra_elt_name (dump_file, elt);
	  fputc ('\n', dump_file);
	}

      /* Disable scalarization of sub-elements.  */
      for (c = elt->children; c; c = c->sibling)
	{
	  c->cannot_scalarize = 1;
	  decide_block_copy (c);
	}

      /* Groups behave like their parent.  */
      for (c = elt->groups; c; c = c->sibling)
	{
	  c->cannot_scalarize = 1;
	  c->use_block_copy = 1;
	}

      return false;
    }

  /* Don't decide if we've no uses and no groups.  */
  if (elt->n_uses == 0 && elt->n_copies == 0 && elt->groups == NULL)
    ;

  else if (!elt->is_scalar)
    {
      tree size_tree = TYPE_SIZE_UNIT (elt->type);
      bool use_block_copy = true;

      /* Tradeoffs for COMPLEX types pretty much always make it better
	 to go ahead and split the components.  */
      if (TREE_CODE (elt->type) == COMPLEX_TYPE)
	use_block_copy = false;

      /* Don't bother trying to figure out the rest if the structure is
	 so large we can't do easy arithmetic.  This also forces block
	 copies for variable sized structures.  */
      else if (host_integerp (size_tree, 1))
	{
	  unsigned HOST_WIDE_INT full_size, inst_size = 0;
	  unsigned int max_size, max_count, inst_count, full_count;

	  /* If the sra-max-structure-size parameter is 0, then the
	     user has not overridden the parameter and we can choose a
	     sensible default.  */
	  max_size = SRA_MAX_STRUCTURE_SIZE
	    ? SRA_MAX_STRUCTURE_SIZE
	    : MOVE_RATIO (optimize_function_for_speed_p (cfun)) * UNITS_PER_WORD;
	  max_count = SRA_MAX_STRUCTURE_COUNT
	    ? SRA_MAX_STRUCTURE_COUNT
	    : MOVE_RATIO (optimize_function_for_speed_p (cfun));
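	  /* For example (a sketch; MOVE_RATIO and UNITS_PER_WORD are
	     target-dependent): with a MOVE_RATIO of 4 on a target with
	     8-byte words, the defaults come to max_size = 32 bytes and
	     max_count = 4 elements.  */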

	  full_size = tree_low_cst (size_tree, 1);
	  full_count = count_type_elements (elt->type, false);
	  inst_count = sum_instantiated_sizes (elt, &inst_size);

	  /* If there is only one scalar field in the record, don't block copy.  */
	  if (single_scalar_field_in_record_p (elt->type))
	    use_block_copy = false;

	  /* ??? What to do here.  If there are two fields, and we've only
	     instantiated one, then instantiating the other is clearly a win.
	     If there are a large number of fields then the size of the copy
	     is much more of a factor.  */

	  /* If the structure is small, and we've made copies, go ahead
	     and instantiate, hoping that the copies will go away.  */
	  if (full_size <= max_size
	      && (full_count - inst_count) <= max_count
	      && elt->n_copies > elt->n_uses)
	    use_block_copy = false;
	  else if (inst_count * 100 >= full_count * SRA_FIELD_STRUCTURE_RATIO
		   && inst_size * 100 >= full_size * SRA_FIELD_STRUCTURE_RATIO)
	    use_block_copy = false;

	  /* In order to avoid block copy, we have to be able to instantiate
	     all elements of the type.  See if this is possible.  */
	  if (!use_block_copy
	      && (!can_completely_scalarize_p (elt)
		  || !type_can_instantiate_all_elements (elt->type)))
	    use_block_copy = true;
	}

      elt->use_block_copy = use_block_copy;

      /* Groups behave like their parent.  */
      for (c = elt->groups; c; c = c->sibling)
	c->use_block_copy = use_block_copy;

      if (dump_file)
	{
	  fprintf (dump_file, "Using %s for ",
		   use_block_copy ? "block-copy" : "element-copy");
	  dump_sra_elt_name (dump_file, elt);
	  fputc ('\n', dump_file);
	}

      if (!use_block_copy)
	{
	  instantiate_missing_elements (elt);
	  return true;
	}
    }

  any_inst = elt->replacement != NULL;

  for (c = elt->children; c ; c = c->sibling)
    any_inst |= decide_block_copy (c);

  return any_inst;
}

/* Entry point to phase 3.  Instantiate scalar replacement variables.  */

static void
decide_instantiations (void)
{
  unsigned int i;
  bool cleared_any;
  bitmap_head done_head;
  bitmap_iterator bi;

  /* We cannot clear bits from a bitmap we're iterating over,
     so save up all the bits to clear until the end.  */
  bitmap_initialize (&done_head, &bitmap_default_obstack);
  cleared_any = false;

  EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
    {
      tree var = referenced_var (i);
      struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
      if (elt)
	{
	  decide_instantiation_1 (elt, 0, 0);
	  if (!decide_block_copy (elt))
	    elt = NULL;
	}
      if (!elt)
	{
	  bitmap_set_bit (&done_head, i);
	  cleared_any = true;
	}
    }

  if (cleared_any)
    {
      bitmap_and_compl_into (sra_candidates, &done_head);
      bitmap_and_compl_into (needs_copy_in, &done_head);
    }
  bitmap_clear (&done_head);

  mark_set_for_renaming (sra_candidates);

  if (dump_file)
    fputc ('\n', dump_file);
}

/* Phase Four: Update the function to match the replacements created.  */

/* Mark all the variables in virtual operands in all the statements in
   LIST for renaming.  */

static void
mark_all_v_defs_seq (gimple_seq seq)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    update_stmt_if_modified (gsi_stmt (gsi));
}

/* Mark every replacement under ELT with TREE_NO_WARNING.  */

static void
mark_no_warning (struct sra_elt *elt)
{
  if (!elt->all_no_warning)
    {
      if (elt->replacement)
	TREE_NO_WARNING (elt->replacement) = 1;
      else
	{
	  struct sra_elt *c;
	  FOR_EACH_ACTUAL_CHILD (c, elt)
	    mark_no_warning (c);
	}
      elt->all_no_warning = true;
    }
}

/* Build a single level component reference to ELT rooted at BASE.  */

static tree
generate_one_element_ref (struct sra_elt *elt, tree base)
{
  switch (TREE_CODE (TREE_TYPE (base)))
    {
    case RECORD_TYPE:
      {
	tree field = elt->element;

	/* We can't test elt->in_bitfld_block here because, when this is
	   called from instantiate_element, we haven't set this field
	   yet.  */
	if (TREE_CODE (field) == BIT_FIELD_REF)
	  {
	    tree ret = unshare_expr (field);
	    TREE_OPERAND (ret, 0) = base;
	    return ret;
	  }

	/* Watch out for compatible records with differing field lists.  */
	if (DECL_FIELD_CONTEXT (field) != TYPE_MAIN_VARIANT (TREE_TYPE (base)))
	  field = find_compatible_field (TREE_TYPE (base), field);

	return build3 (COMPONENT_REF, elt->type, base, field, NULL);
      }

    case ARRAY_TYPE:
      if (TREE_CODE (elt->element) == RANGE_EXPR)
	return build4 (ARRAY_RANGE_REF, elt->type, base,
		       TREE_OPERAND (elt->element, 0), NULL, NULL);
      else
	return build4 (ARRAY_REF, elt->type, base, elt->element, NULL, NULL);

    case COMPLEX_TYPE:
      if (elt->element == integer_zero_node)
	return build1 (REALPART_EXPR, elt->type, base);
      else
	return build1 (IMAGPART_EXPR, elt->type, base);

    default:
      gcc_unreachable ();
    }
}

/* Build a full component reference to ELT rooted at its native variable.  */

static tree
generate_element_ref (struct sra_elt *elt)
{
  if (elt->parent)
    return generate_one_element_ref (elt, generate_element_ref (elt->parent));
  else
    return elt->element;
}

/* Return true if BF is a bit-field that we can handle like a scalar.  */

static bool
scalar_bitfield_p (tree bf)
{
  return (TREE_CODE (bf) == BIT_FIELD_REF
	  && (is_gimple_reg (TREE_OPERAND (bf, 0))
	      || (TYPE_MODE (TREE_TYPE (TREE_OPERAND (bf, 0))) != BLKmode
		  && (!TREE_SIDE_EFFECTS (TREE_OPERAND (bf, 0))
		      || (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE
						       (TREE_OPERAND (bf, 0))))
			  <= BITS_PER_WORD)))));
}

/* Create an assignment statement from SRC to DST.  */

static gimple_seq
sra_build_assignment (tree dst, tree src)
{
  gimple stmt;
  gimple_seq seq = NULL, seq2 = NULL;
  /* Turning BIT_FIELD_REFs into bit operations enables other passes
     to do a much better job at optimizing the code.
     From dst = BIT_FIELD_REF <var, sz, off> we produce

	SR.1 = (scalar type) var;
	SR.2 = SR.1 >> off;
	SR.3 = SR.2 & ((1 << sz) - 1);
	... possible sign extension of SR.3 ...
	dst = (destination type) SR.3;  */
  if (scalar_bitfield_p (src))
    {
      tree var, shift, width;
      tree utype, stype;
      bool unsignedp = (INTEGRAL_TYPE_P (TREE_TYPE (src))
			? TYPE_UNSIGNED (TREE_TYPE (src)) : true);
      struct gimplify_ctx gctx;

      var = TREE_OPERAND (src, 0);
      width = TREE_OPERAND (src, 1);
      /* The offset needs to be adjusted to a right shift quantity
	 depending on the endianness.  */
      if (BYTES_BIG_ENDIAN)
	{
	  tree tmp = size_binop (PLUS_EXPR, width, TREE_OPERAND (src, 2));
	  shift = size_binop (MINUS_EXPR, TYPE_SIZE (TREE_TYPE (var)), tmp);
	}
      else
	shift = TREE_OPERAND (src, 2);

      /* In weird cases we have non-integral types for the source or
	 destination object.
	 ??? For unknown reasons we also want an unsigned scalar type.  */
      stype = TREE_TYPE (var);
      if (!INTEGRAL_TYPE_P (stype))
	stype = lang_hooks.types.type_for_size (TREE_INT_CST_LOW
						(TYPE_SIZE (stype)), 1);
      else if (!TYPE_UNSIGNED (stype))
	stype = unsigned_type_for (stype);

      utype = TREE_TYPE (dst);
      if (!INTEGRAL_TYPE_P (utype))
	utype = lang_hooks.types.type_for_size (TREE_INT_CST_LOW
						(TYPE_SIZE (utype)), 1);
      else if (!TYPE_UNSIGNED (utype))
	utype = unsigned_type_for (utype);

      /* Convert the base var of the BIT_FIELD_REF to the scalar type
	 we use for computation if we cannot use it directly.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (var)))
	var = fold_convert (stype, var);
      else
	var = fold_build1 (VIEW_CONVERT_EXPR, stype, var);

      if (!integer_zerop (shift))
	var = fold_build2 (RSHIFT_EXPR, stype, var, shift);

      /* If we need a masking operation, produce one.  */
      if (TREE_INT_CST_LOW (width) == TYPE_PRECISION (stype))
	unsignedp = true;
      else
	{
	  tree one = build_int_cst_wide (stype, 1, 0);
	  tree mask = int_const_binop (LSHIFT_EXPR, one, width, 0);
	  mask = int_const_binop (MINUS_EXPR, mask, one, 0);
	  var = fold_build2 (BIT_AND_EXPR, stype, var, mask);
	}

      /* After shifting and masking, convert to the target type.  */
      var = fold_convert (utype, var);

      /* Perform sign extension, if required.
	 ??? This should never be necessary.  */
      if (!unsignedp)
	{
	  tree signbit = int_const_binop (LSHIFT_EXPR,
					  build_int_cst_wide (utype, 1, 0),
					  size_binop (MINUS_EXPR, width,
						      bitsize_int (1)), 0);

	  var = fold_build2 (BIT_XOR_EXPR, utype, var, signbit);
	  var = fold_build2 (MINUS_EXPR, utype, var, signbit);
	}

      /* fold_build3 (BIT_FIELD_REF, ...) sometimes returns a cast.  */
      STRIP_NOPS (dst);

      /* Finally, move and convert to the destination.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (dst)))
	var = fold_convert (TREE_TYPE (dst), var);
      else
	var = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (dst), var);

      push_gimplify_context (&gctx);
      gctx.allow_rhs_cond_expr = true;

      gimplify_assign (dst, var, &seq);

      if (gimple_referenced_vars (cfun))
	for (var = gctx.temps; var; var = TREE_CHAIN (var))
	  {
	    add_referenced_var (var);
	    mark_sym_for_renaming (var);
	  }
      pop_gimplify_context (NULL);

      return seq;
    }

  /* fold_build3 (BIT_FIELD_REF, ...) sometimes returns a cast.  */
  if (CONVERT_EXPR_P (dst))
    {
      STRIP_NOPS (dst);
      src = fold_convert (TREE_TYPE (dst), src);
    }
  /* It was hoped that we could perform some type sanity checking
     here, but since front-ends can emit accesses of fields in types
     different from their nominal types and copy structures containing
     them as a whole, we'd have to handle such differences here.
     Since such accesses under different types require compatibility
     anyway, there's little point in making tests and/or adding
     conversions to ensure the types of src and dst are the same.
     So we just assume type differences at this point are ok.
     The only exception we make here are pointer types, which can be different
     in e.g. structurally equal, but non-identical RECORD_TYPEs.  */
  else if (POINTER_TYPE_P (TREE_TYPE (dst))
	   && !useless_type_conversion_p (TREE_TYPE (dst), TREE_TYPE (src)))
    src = fold_convert (TREE_TYPE (dst), src);

  /* ??? Only call the gimplifier if we need to.  Otherwise we may
     end up substituting with DECL_VALUE_EXPR - see PR37380.  */
  if (!handled_component_p (src)
      && !SSA_VAR_P (src))
    {
      src = force_gimple_operand (src, &seq2, false, NULL_TREE);
      gimple_seq_add_seq (&seq, seq2);
    }
  stmt = gimple_build_assign (dst, src);
  gimple_seq_add_stmt (&seq, stmt);
  return seq;
}

/* BIT_FIELD_REFs must not be shared.  sra_build_elt_assignment()
   takes care of assignments, but we must create copies for uses.  */
#define REPLDUP(t) (TREE_CODE (t) != BIT_FIELD_REF ? (t) : unshare_expr (t))

/* Emit an assignment from SRC to DST, but if DST is a scalarizable
   BIT_FIELD_REF, turn it into bit operations.  */
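
/* Roughly, the expansion built below is (a sketch, not literal output):

     tmp = var & ~MASK;			     clear the destination bits
     val = ((utype) src << MINSHIFT) & MASK;   position the source bits
     var = tmp | val;			     merge

   where MASK covers the bit range [MINSHIFT, MAXSHIFT) that the
   BIT_FIELD_REF selects.  */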
static gimple_seq
sra_build_bf_assignment (tree dst, tree src)
{
  tree var, type, utype, tmp, tmp2, tmp3;
  gimple_seq seq;
  gimple stmt;
  tree cst, cst2, mask;
  tree minshift, maxshift;

  if (TREE_CODE (dst) != BIT_FIELD_REF)
    return sra_build_assignment (dst, src);

  var = TREE_OPERAND (dst, 0);

  if (!scalar_bitfield_p (dst))
    return sra_build_assignment (REPLDUP (dst), src);

  seq = NULL;

  cst = fold_convert (bitsizetype, TREE_OPERAND (dst, 2));
  cst2 = size_binop (PLUS_EXPR,
		     fold_convert (bitsizetype, TREE_OPERAND (dst, 1)),
		     cst);

  if (BYTES_BIG_ENDIAN)
    {
      maxshift = size_binop (MINUS_EXPR, TYPE_SIZE (TREE_TYPE (var)), cst);
      minshift = size_binop (MINUS_EXPR, TYPE_SIZE (TREE_TYPE (var)), cst2);
    }
  else
    {
      maxshift = cst2;
      minshift = cst;
    }

  type = TREE_TYPE (var);
  if (!INTEGRAL_TYPE_P (type))
    type = lang_hooks.types.type_for_size
      (TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (var))), 1);
  if (TYPE_UNSIGNED (type))
    utype = type;
  else
    utype = unsigned_type_for (type);

  mask = build_int_cst_wide (utype, 1, 0);
  if (TREE_INT_CST_LOW (maxshift) == TYPE_PRECISION (utype))
    cst = build_int_cst_wide (utype, 0, 0);
  else
    cst = int_const_binop (LSHIFT_EXPR, mask, maxshift, true);
  if (integer_zerop (minshift))
    cst2 = mask;
  else
    cst2 = int_const_binop (LSHIFT_EXPR, mask, minshift, true);
  mask = int_const_binop (MINUS_EXPR, cst, cst2, true);
  mask = fold_build1 (BIT_NOT_EXPR, utype, mask);

  if (TYPE_MAIN_VARIANT (utype) != TYPE_MAIN_VARIANT (TREE_TYPE (var))
      && !integer_zerop (mask))
    {
      tmp = var;
      if (!is_gimple_variable (tmp))
	tmp = unshare_expr (var);
      else
	TREE_NO_WARNING (var) = true;

      tmp2 = make_rename_temp (utype, "SR");

      if (INTEGRAL_TYPE_P (TREE_TYPE (var)))
	tmp = fold_convert (utype, tmp);
      else
	tmp = fold_build1 (VIEW_CONVERT_EXPR, utype, tmp);

      stmt = gimple_build_assign (tmp2, tmp);
      gimple_seq_add_stmt (&seq, stmt);
    }
  else
    tmp2 = var;

  if (!integer_zerop (mask))
    {
      tmp = make_rename_temp (utype, "SR");
      stmt = gimple_build_assign (tmp, fold_build2 (BIT_AND_EXPR, utype,
						    tmp2, mask));
      gimple_seq_add_stmt (&seq, stmt);
    }
  else
    tmp = mask;

  if (is_gimple_reg (src) && INTEGRAL_TYPE_P (TREE_TYPE (src)))
    tmp2 = src;
  else if (INTEGRAL_TYPE_P (TREE_TYPE (src)))
    {
      gimple_seq tmp_seq;
      tmp2 = make_rename_temp (TREE_TYPE (src), "SR");
      tmp_seq = sra_build_assignment (tmp2, src);
      gimple_seq_add_seq (&seq, tmp_seq);
    }
  else
    {
      gimple_seq tmp_seq;
      tmp2 = make_rename_temp
	(lang_hooks.types.type_for_size
	 (TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (src))),
	  1), "SR");
      tmp_seq = sra_build_assignment (tmp2, fold_build1 (VIEW_CONVERT_EXPR,
							 TREE_TYPE (tmp2), src));
      gimple_seq_add_seq (&seq, tmp_seq);
    }

  if (!TYPE_UNSIGNED (TREE_TYPE (tmp2)))
    {
      gimple_seq tmp_seq;
      tree ut = unsigned_type_for (TREE_TYPE (tmp2));
      tmp3 = make_rename_temp (ut, "SR");
      tmp2 = fold_convert (ut, tmp2);
      tmp_seq = sra_build_assignment (tmp3, tmp2);
      gimple_seq_add_seq (&seq, tmp_seq);

      tmp2 = fold_build1 (BIT_NOT_EXPR, utype, mask);
      tmp2 = int_const_binop (RSHIFT_EXPR, tmp2, minshift, true);
      tmp2 = fold_convert (ut, tmp2);
      tmp2 = fold_build2 (BIT_AND_EXPR, ut, tmp3, tmp2);

      if (tmp3 != tmp2)
	{
	  tmp3 = make_rename_temp (ut, "SR");
	  tmp_seq = sra_build_assignment (tmp3, tmp2);
	  gimple_seq_add_seq (&seq, tmp_seq);
	}

      tmp2 = tmp3;
    }

  if (TYPE_MAIN_VARIANT (TREE_TYPE (tmp2)) != TYPE_MAIN_VARIANT (utype))
    {
      gimple_seq tmp_seq;
      tmp3 = make_rename_temp (utype, "SR");
      tmp2 = fold_convert (utype, tmp2);
      tmp_seq = sra_build_assignment (tmp3, tmp2);
      gimple_seq_add_seq (&seq, tmp_seq);
      tmp2 = tmp3;
    }

  if (!integer_zerop (minshift))
    {
      tmp3 = make_rename_temp (utype, "SR");
      stmt = gimple_build_assign (tmp3, fold_build2 (LSHIFT_EXPR, utype,
						     tmp2, minshift));
      gimple_seq_add_stmt (&seq, stmt);
      tmp2 = tmp3;
    }

  if (utype != TREE_TYPE (var))
    tmp3 = make_rename_temp (utype, "SR");
  else
    tmp3 = var;
  stmt = gimple_build_assign (tmp3, fold_build2 (BIT_IOR_EXPR, utype,
						 tmp, tmp2));
  gimple_seq_add_stmt (&seq, stmt);

  if (tmp3 != var)
    {
      if (TREE_TYPE (var) == type)
	stmt = gimple_build_assign (var, fold_convert (type, tmp3));
      else
	stmt = gimple_build_assign (var, fold_build1 (VIEW_CONVERT_EXPR,
						      TREE_TYPE (var), tmp3));
      gimple_seq_add_stmt (&seq, stmt);
    }

  return seq;
}
2433 /* Expand an assignment of SRC to the scalarized representation of
2434 ELT. If it is a field group, try to widen the assignment to cover
2435 the full variable. */
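/* E.g. (hypothetical trees): if the group's replacement is
   BIT_FIELD_REF <d, 16, 8> and SRC is BIT_FIELD_REF <s, 16, 16>, the
   whole variable can be written at once as d = BIT_FIELD_REF <s, dw, 8>,
   with dw the width of D, instead of going through the masking path
   of sra_build_bf_assignment.  */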
2438 sra_build_elt_assignment (struct sra_elt *elt, tree src)
2440 tree dst = elt->replacement;
2441 tree var, tmp, cst, cst2;
2445 if (TREE_CODE (dst) != BIT_FIELD_REF
2446 || !elt->in_bitfld_block)
2447 return sra_build_assignment (REPLDUP (dst), src);
2449 var = TREE_OPERAND (dst, 0);
2451 /* Try to widen the assignment to the entire variable.
2452 We need the source to be a BIT_FIELD_REF as well, such that, for
2453 BIT_FIELD_REF<d,sz,dp> = BIT_FIELD_REF<s,sz,sp>,
2454 the conditions are met by design to turn it into
2455 d = BIT_FIELD_REF<s,dw,sp-dp>, where dw is the width of D's variable. */
2456 if (elt->in_bitfld_block == 2
2457 && TREE_CODE (src) == BIT_FIELD_REF)
2460 cst = TYPE_SIZE (TREE_TYPE (var));
2461 cst2 = size_binop (MINUS_EXPR, TREE_OPERAND (src, 2),
2462 TREE_OPERAND (dst, 2));
2464 src = TREE_OPERAND (src, 0);
2466 /* Avoid full-width bit-fields. */
2467 if (integer_zerop (cst2)
2468 && tree_int_cst_equal (cst, TYPE_SIZE (TREE_TYPE (src))))
2470 if (INTEGRAL_TYPE_P (TREE_TYPE (src))
2471 && !TYPE_UNSIGNED (TREE_TYPE (src)))
2472 src = fold_convert (unsigned_type_for (TREE_TYPE (src)), src);
2474 /* If a single conversion won't do, we'll need a statement list. */
2476 if (TYPE_MAIN_VARIANT (TREE_TYPE (var))
2477 != TYPE_MAIN_VARIANT (TREE_TYPE (src)))
2482 if (!INTEGRAL_TYPE_P (TREE_TYPE (src)))
2483 src = fold_build1 (VIEW_CONVERT_EXPR,
2484 lang_hooks.types.type_for_size
2486 (TYPE_SIZE (TREE_TYPE (src))),
2488 gcc_assert (TYPE_UNSIGNED (TREE_TYPE (src)));
2490 tmp = make_rename_temp (TREE_TYPE (src), "SR");
2491 stmt = gimple_build_assign (tmp, src);
2492 gimple_seq_add_stmt (&seq, stmt);
2494 tmp_seq = sra_build_assignment (var,
2495 fold_convert (TREE_TYPE (var),
2497 gimple_seq_add_seq (&seq, tmp_seq);
2502 src = fold_convert (TREE_TYPE (var), src);
2506 src = fold_convert (TREE_TYPE (var), tmp);
2509 return sra_build_assignment (var, src);
2512 return sra_build_bf_assignment (dst, src);
2515 /* Generate a set of assignment statements in *SEQ_P to copy all
2516 instantiated elements under ELT to or from the equivalent structure
2517 rooted at EXPR. COPY_OUT controls the direction of the copy, with
2518 true meaning to copy out of EXPR into ELT. */
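/* For instance (illustrative names): for a candidate
   struct { int a; float b; } s with replacement variables SR_a and
   SR_b, COPY_OUT = true produces SR_a = s.a; SR_b = s.b; and
   COPY_OUT = false produces the same assignments in the opposite
   direction.  */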
2521 generate_copy_inout (struct sra_elt *elt, bool copy_out, tree expr,
2528 if (!copy_out && TREE_CODE (expr) == SSA_NAME
2529 && TREE_CODE (TREE_TYPE (expr)) == COMPLEX_TYPE)
2533 c = lookup_element (elt, integer_zero_node, NULL, NO_INSERT);
2535 c = lookup_element (elt, integer_one_node, NULL, NO_INSERT);
2538 t = build2 (COMPLEX_EXPR, elt->type, r, i);
2539 tmp_seq = sra_build_bf_assignment (expr, t);
2540 SSA_NAME_DEF_STMT (expr) = gimple_seq_last_stmt (tmp_seq);
2541 gimple_seq_add_seq (seq_p, tmp_seq);
2543 else if (elt->replacement)
2546 tmp_seq = sra_build_elt_assignment (elt, expr);
2548 tmp_seq = sra_build_bf_assignment (expr, REPLDUP (elt->replacement));
2549 gimple_seq_add_seq (seq_p, tmp_seq);
2553 FOR_EACH_ACTUAL_CHILD (c, elt)
2555 t = generate_one_element_ref (c, unshare_expr (expr));
2556 generate_copy_inout (c, copy_out, t, seq_p);
2561 /* Generate a set of assignment statements in *SEQ_P to copy all instantiated
2562 elements under SRC to their counterparts under DST. There must be a 1-1
2563 correspondence of instantiated elements. */
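/* E.g. (illustrative names): a block copy d = s between two fully
   instantiated aggregates becomes one scalar assignment per
   instantiated leaf, SR_d_a = SR_s_a; SR_d_b = SR_s_b; recursing
   through matching children.  */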
2566 generate_element_copy (struct sra_elt *dst, struct sra_elt *src, gimple_seq *seq_p)
2568 struct sra_elt *dc, *sc;
2570 FOR_EACH_ACTUAL_CHILD (dc, dst)
2572 sc = lookup_element (src, dc->element, NULL, NO_INSERT);
2573 if (!sc && dc->in_bitfld_block == 2)
2575 struct sra_elt *dcs;
2577 FOR_EACH_ACTUAL_CHILD (dcs, dc)
2579 sc = lookup_element (src, dcs->element, NULL, NO_INSERT);
2581 generate_element_copy (dcs, sc, seq_p);
2587 /* If DST and SRC are structs with the same elements, but do not have
2588 the same TYPE_MAIN_VARIANT, then lookup of DST FIELD_DECL in SRC
2589 will fail. Try harder by finding the corresponding FIELD_DECL in SRC. */
2595 gcc_assert (useless_type_conversion_p (dst->type, src->type));
2596 gcc_assert (TREE_CODE (dc->element) == FIELD_DECL);
2597 for (f = TYPE_FIELDS (src->type); f ; f = TREE_CHAIN (f))
2598 if (simple_cst_equal (DECL_FIELD_OFFSET (f),
2599 DECL_FIELD_OFFSET (dc->element)) > 0
2600 && simple_cst_equal (DECL_FIELD_BIT_OFFSET (f),
2601 DECL_FIELD_BIT_OFFSET (dc->element)) > 0
2602 && simple_cst_equal (DECL_SIZE (f),
2603 DECL_SIZE (dc->element)) > 0
2604 && (useless_type_conversion_p (TREE_TYPE (dc->element),
2606 || (POINTER_TYPE_P (TREE_TYPE (dc->element))
2607 && POINTER_TYPE_P (TREE_TYPE (f)))))
2609 gcc_assert (f != NULL_TREE);
2610 sc = lookup_element (src, f, NULL, NO_INSERT);
2613 generate_element_copy (dc, sc, seq_p);
2616 if (dst->replacement)
2620 gcc_assert (src->replacement);
2622 tmp_seq = sra_build_elt_assignment (dst, REPLDUP (src->replacement));
2623 gimple_seq_add_seq (seq_p, tmp_seq);
2627 /* Generate a set of assignment statements in *SEQ_P to zero all instantiated
2628 elements under ELT. In addition, do not assign to elements that have been
2629 marked VISITED but do reset the visited flag; this allows easy coordination
2630 with generate_element_init. */
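/* E.g. (illustrative): for s = (struct S){ .a = 1 }, the init code
   assigns s.a's replacement and marks that element visited, so this
   function only zeroes the remaining instantiated leaves such as
   s.b's replacement.  */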
2633 generate_element_zero (struct sra_elt *elt, gimple_seq *seq_p)
2639 elt->visited = false;
2643 if (!elt->in_bitfld_block)
2644 FOR_EACH_ACTUAL_CHILD (c, elt)
2645 generate_element_zero (c, seq_p);
2647 if (elt->replacement)
2652 gcc_assert (elt->is_scalar);
2653 t = fold_convert (elt->type, integer_zero_node);
2655 tmp_seq = sra_build_elt_assignment (elt, t);
2656 gimple_seq_add_seq (seq_p, tmp_seq);
2660 /* Generate an assignment VAR = INIT, where INIT may need gimplification.
2661 Add the result to *SEQ_P. */
2664 generate_one_element_init (struct sra_elt *elt, tree init, gimple_seq *seq_p)
2666 gimple_seq tmp_seq = sra_build_elt_assignment (elt, init);
2667 gimple_seq_add_seq (seq_p, tmp_seq);
2670 /* Generate a set of assignment statements in *SEQ_P to set all instantiated
2671 elements under ELT with the contents of the initializer INIT. In addition,
2672 mark all assigned elements VISITED; this allows easy coordination with
2673 generate_element_zero. Return false if we found a case we couldn't handle. */
2677 generate_element_init_1 (struct sra_elt *elt, tree init, gimple_seq *seq_p)
2680 enum tree_code init_code;
2681 struct sra_elt *sub;
2683 unsigned HOST_WIDE_INT idx;
2684 tree value, purpose;
2686 /* We can be passed DECL_INITIAL of a static variable. It might have a
2687 conversion, which we strip off here. */
2688 STRIP_USELESS_TYPE_CONVERSION (init);
2689 init_code = TREE_CODE (init);
2693 if (elt->replacement)
2695 generate_one_element_init (elt, init, seq_p);
2696 elt->visited = true;
2705 FOR_EACH_ACTUAL_CHILD (sub, elt)
2707 if (sub->element == integer_zero_node)
2708 t = (init_code == COMPLEX_EXPR
2709 ? TREE_OPERAND (init, 0) : TREE_REALPART (init));
2711 t = (init_code == COMPLEX_EXPR
2712 ? TREE_OPERAND (init, 1) : TREE_IMAGPART (init));
2713 result &= generate_element_init_1 (sub, t, seq_p);
2718 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), idx, purpose, value)
2720 /* Array constructors are routinely created with NULL indices. */
2721 if (purpose == NULL_TREE)
2726 if (TREE_CODE (purpose) == RANGE_EXPR)
2728 tree lower = TREE_OPERAND (purpose, 0);
2729 tree upper = TREE_OPERAND (purpose, 1);
2733 sub = lookup_element (elt, lower, NULL, NO_INSERT);
2735 result &= generate_element_init_1 (sub, value, seq_p);
2736 if (tree_int_cst_equal (lower, upper))
2738 lower = int_const_binop (PLUS_EXPR, lower,
2739 integer_one_node, true);
2744 sub = lookup_element (elt, purpose, NULL, NO_INSERT);
2746 result &= generate_element_init_1 (sub, value, seq_p);
2752 elt->visited = true;
2759 /* A wrapper function for generate_element_init_1 that handles cleanup after gimplification. */
2763 generate_element_init (struct sra_elt *elt, tree init, gimple_seq *seq_p)
2766 struct gimplify_ctx gctx;
2768 push_gimplify_context (&gctx);
2769 ret = generate_element_init_1 (elt, init, seq_p);
2770 pop_gimplify_context (NULL);
2772 /* The replacement can expose previously unreferenced variables. */
2775 gimple_stmt_iterator i;
2777 for (i = gsi_start (*seq_p); !gsi_end_p (i); gsi_next (&i))
2778 find_new_referenced_vars (gsi_stmt (i));
2784 /* Insert a gimple_seq SEQ on all the outgoing edges of BB. Note that
2785 if BB has more than one edge, SEQ will be replicated for each edge.
2786 Also, abnormal edges will be ignored. */
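/* Note that the sequence itself is reused for the final copy, so a
   block with N normal outgoing edges costs only N-1 calls to
   gimple_seq_copy.  */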
2789 insert_edge_copies_seq (gimple_seq seq, basic_block bb)
2793 unsigned n_copies = -1;
2795 FOR_EACH_EDGE (e, ei, bb->succs)
2796 if (!(e->flags & EDGE_ABNORMAL))
2797 n_copies++;
2799 FOR_EACH_EDGE (e, ei, bb->succs)
2800 if (!(e->flags & EDGE_ABNORMAL))
2801 gsi_insert_seq_on_edge (e, n_copies-- > 0 ? gimple_seq_copy (seq) : seq);
2804 /* Helper function to insert SEQ before GSI, and set up line number info. */
2807 sra_insert_before (gimple_stmt_iterator *gsi, gimple_seq seq)
2809 gimple stmt = gsi_stmt (*gsi);
2811 if (gimple_has_location (stmt))
2812 annotate_all_with_location (seq, gimple_location (stmt));
2813 gsi_insert_seq_before (gsi, seq, GSI_SAME_STMT);
2816 /* Similarly, but insert after GSI. Handles insertion onto edges as well. */
2819 sra_insert_after (gimple_stmt_iterator *gsi, gimple_seq seq)
2821 gimple stmt = gsi_stmt (*gsi);
2823 if (gimple_has_location (stmt))
2824 annotate_all_with_location (seq, gimple_location (stmt));
2826 if (stmt_ends_bb_p (stmt))
2827 insert_edge_copies_seq (seq, gsi_bb (*gsi));
2829 gsi_insert_seq_after (gsi, seq, GSI_SAME_STMT);
2832 /* Similarly, but replace the statement at GSI. */
2835 sra_replace (gimple_stmt_iterator *gsi, gimple_seq seq)
2837 sra_insert_before (gsi, seq);
2838 unlink_stmt_vdef (gsi_stmt (*gsi));
2839 gsi_remove (gsi, false);
2840 if (gsi_end_p (*gsi))
2841 *gsi = gsi_last (gsi_seq (*gsi));
2846 /* Data structure that bitfield_overlaps_p fills in with information
2847 about the element passed in and how much of it overlaps with the
2848 bit-range passed to it. */
2850 struct bitfield_overlap_info
2852 /* The bit-length of an element. */
2855 /* The bit-position of the element in its parent. */
2858 /* The number of bits of the element that overlap with the incoming bit range. */
2862 /* The first bit of the element that overlaps with the incoming bit range. */
2867 /* Return true if a BIT_FIELD_REF<(FLD->parent), BLEN, BPOS>
2868 expression (referenced as BF below) accesses any of the bits in FLD,
2869 false if it doesn't. If DATA is non-null, its field_len and
2870 field_pos are filled in such that BIT_FIELD_REF<(FLD->parent),
2871 field_len, field_pos> (referenced as BFLD below) represents the
2872 entire field FLD->element, and BIT_FIELD_REF<BFLD, overlap_len,
2873 overlap_pos> represents the portion of the entire field that
2874 overlaps with BF. */
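/* Worked example with hypothetical numbers: let FLD be a field at bit
   position 8 with size 16 and let BF have BLEN = 12, BPOS = 4.  The
   ranges [8,24) and [4,16) intersect, so we return true and fill in
   field_pos = 8, field_len = 16, overlap_pos = NULL (the overlap
   begins at bit 0 of the field) and overlap_len = 8 (only the first
   8 bits of the field fall within BF).  */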
2877 bitfield_overlaps_p (tree blen, tree bpos, struct sra_elt *fld,
2878 struct bitfield_overlap_info *data)
2883 if (TREE_CODE (fld->element) == FIELD_DECL)
2885 flen = fold_convert (bitsizetype, DECL_SIZE (fld->element));
2886 fpos = fold_convert (bitsizetype, DECL_FIELD_OFFSET (fld->element));
2887 fpos = size_binop (MULT_EXPR, fpos, bitsize_int (BITS_PER_UNIT));
2888 fpos = size_binop (PLUS_EXPR, fpos, DECL_FIELD_BIT_OFFSET (fld->element));
2890 else if (TREE_CODE (fld->element) == BIT_FIELD_REF)
2892 flen = fold_convert (bitsizetype, TREE_OPERAND (fld->element, 1));
2893 fpos = fold_convert (bitsizetype, TREE_OPERAND (fld->element, 2));
2895 else if (TREE_CODE (fld->element) == INTEGER_CST)
2897 tree domain_type = TYPE_DOMAIN (TREE_TYPE (fld->parent->element));
2898 flen = fold_convert (bitsizetype, TYPE_SIZE (fld->type));
2899 fpos = fold_convert (bitsizetype, fld->element);
2900 if (domain_type && TYPE_MIN_VALUE (domain_type))
2901 fpos = size_binop (MINUS_EXPR, fpos,
2902 fold_convert (bitsizetype,
2903 TYPE_MIN_VALUE (domain_type)));
2904 fpos = size_binop (MULT_EXPR, flen, fpos);
2909 gcc_assert (host_integerp (blen, 1)
2910 && host_integerp (bpos, 1)
2911 && host_integerp (flen, 1)
2912 && host_integerp (fpos, 1));
2914 ret = ((!tree_int_cst_lt (fpos, bpos)
2915 && tree_int_cst_lt (size_binop (MINUS_EXPR, fpos, bpos),
2917 || (!tree_int_cst_lt (bpos, fpos)
2918 && tree_int_cst_lt (size_binop (MINUS_EXPR, bpos, fpos),
2928 data->field_len = flen;
2929 data->field_pos = fpos;
2931 fend = size_binop (PLUS_EXPR, fpos, flen);
2932 bend = size_binop (PLUS_EXPR, bpos, blen);
2934 if (tree_int_cst_lt (bend, fend))
2935 data->overlap_len = size_binop (MINUS_EXPR, bend, fpos);
2937 data->overlap_len = NULL;
2939 if (tree_int_cst_lt (fpos, bpos))
2941 data->overlap_pos = size_binop (MINUS_EXPR, bpos, fpos);
2942 data->overlap_len = size_binop (MINUS_EXPR,
2949 data->overlap_pos = NULL;
2955 /* Add to *SEQ_P a sequence of statements that copies BLEN bits between
2956 VAR and the scalarized elements of ELT, starting at bit VPOS of VAR
2957 and at bit BPOS of ELT. The direction of the copy is given by TO_VAR. */
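/* E.g. (illustrative): with BLEN = 16, BPOS = 0 and two instantiated
   8-bit children, this emits one BIT_FIELD_REF assignment per child,
   reading from or writing to VAR as selected by TO_VAR; children
   without a replacement are recursed into instead.  */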
2961 sra_explode_bitfield_assignment (tree var, tree vpos, bool to_var,
2962 gimple_seq *seq_p, tree blen, tree bpos,
2963 struct sra_elt *elt)
2965 struct sra_elt *fld;
2966 struct bitfield_overlap_info flp;
2968 FOR_EACH_ACTUAL_CHILD (fld, elt)
2972 if (!bitfield_overlaps_p (blen, bpos, fld, &flp))
2975 flen = flp.overlap_len ? flp.overlap_len : flp.field_len;
2976 fpos = flp.overlap_pos ? flp.overlap_pos : bitsize_int (0);
2978 if (fld->replacement)
2980 tree infld, invar, type;
2983 infld = fld->replacement;
2985 type = unsigned_type_for (TREE_TYPE (infld));
2986 if (TYPE_PRECISION (type) != TREE_INT_CST_LOW (flen))
2987 type = build_nonstandard_integer_type (TREE_INT_CST_LOW (flen), 1);
2989 if (TREE_CODE (infld) == BIT_FIELD_REF)
2991 fpos = size_binop (PLUS_EXPR, fpos, TREE_OPERAND (infld, 2));
2992 infld = TREE_OPERAND (infld, 0);
2994 else if (BYTES_BIG_ENDIAN && DECL_P (fld->element)
2995 && !tree_int_cst_equal (TYPE_SIZE (TREE_TYPE (infld)),
2996 DECL_SIZE (fld->element)))
2998 fpos = size_binop (PLUS_EXPR, fpos,
2999 TYPE_SIZE (TREE_TYPE (infld)));
3000 fpos = size_binop (MINUS_EXPR, fpos,
3001 DECL_SIZE (fld->element));
3004 infld = fold_build3 (BIT_FIELD_REF, type, infld, flen, fpos);
3006 invar = size_binop (MINUS_EXPR, flp.field_pos, bpos);
3007 if (flp.overlap_pos)
3008 invar = size_binop (PLUS_EXPR, invar, flp.overlap_pos);
3009 invar = size_binop (PLUS_EXPR, invar, vpos);
3011 invar = fold_build3 (BIT_FIELD_REF, type, var, flen, invar);
3014 st = sra_build_bf_assignment (invar, infld);
3016 st = sra_build_bf_assignment (infld, invar);
3018 gimple_seq_add_seq (seq_p, st);
3022 tree sub = size_binop (MINUS_EXPR, flp.field_pos, bpos);
3023 sub = size_binop (PLUS_EXPR, vpos, sub);
3024 if (flp.overlap_pos)
3025 sub = size_binop (PLUS_EXPR, sub, flp.overlap_pos);
3027 sra_explode_bitfield_assignment (var, sub, to_var, seq_p,
3033 /* Add to *SEQ_BEFORE_P statements that copy scalarized members of ELT
3034 that overlap with BIT_FIELD_REF<(ELT->element), BLEN, BPOS> back
3035 into the full variable, and to *SEQ_AFTER_P, if non-NULL, statements
3036 that copy the (presumably modified) overlapping portions of the
3037 full variable back to the scalarized variables. */
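/* In other words (a sketch of the intent): members whose scalarized
   copy participates in the bit-field store are flushed back into the
   variable beforehand via *SEQ_BEFORE_P and reloaded from it
   afterwards via *SEQ_AFTER_P, so both views stay coherent.  */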
3040 sra_sync_for_bitfield_assignment (gimple_seq *seq_before_p,
3041 gimple_seq *seq_after_p,
3042 tree blen, tree bpos,
3043 struct sra_elt *elt)
3045 struct sra_elt *fld;
3046 struct bitfield_overlap_info flp;
3048 FOR_EACH_ACTUAL_CHILD (fld, elt)
3049 if (bitfield_overlaps_p (blen, bpos, fld, &flp))
3051 if (fld->replacement || (!flp.overlap_len && !flp.overlap_pos))
3053 generate_copy_inout (fld, false, generate_element_ref (fld),
3055 mark_no_warning (fld);
3057 generate_copy_inout (fld, true, generate_element_ref (fld),
3062 tree flen = flp.overlap_len ? flp.overlap_len : flp.field_len;
3063 tree fpos = flp.overlap_pos ? flp.overlap_pos : bitsize_int (0);
3065 sra_sync_for_bitfield_assignment (seq_before_p, seq_after_p,
3071 /* Scalarize a USE. To recap, this is either a simple reference to ELT,
3072 if ELT is scalar, or some occurrence of ELT that requires a complete
3073 aggregate. IS_OUTPUT is true if ELT is being modified. */
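/* For instance (illustrative names): a call f (s.a) where s.a has a
   scalar replacement simply rewrites *EXPR_P to that replacement in
   place, whereas passing the whole aggregate s forces the copy-in /
   copy-out sequences generated below.  */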
3076 scalarize_use (struct sra_elt *elt, tree *expr_p, gimple_stmt_iterator *gsi,
3077 bool is_output, bool use_all)
3079 gimple stmt = gsi_stmt (*gsi);
3082 if (elt->replacement)
3084 tree replacement = elt->replacement;
3086 /* If we have a replacement, then updating the reference is as
3087 simple as modifying the existing statement in place. */
3089 && TREE_CODE (elt->replacement) == BIT_FIELD_REF
3090 && is_gimple_reg (TREE_OPERAND (elt->replacement, 0))
3091 && is_gimple_assign (stmt)
3092 && gimple_assign_lhs_ptr (stmt) == expr_p)
3095 /* RHS must be a single operand. */
3096 gcc_assert (gimple_assign_single_p (stmt));
3097 newseq = sra_build_elt_assignment (elt, gimple_assign_rhs1 (stmt));
3098 sra_replace (gsi, newseq);
3102 && TREE_CODE (elt->replacement) == BIT_FIELD_REF
3103 && is_gimple_assign (stmt)
3104 && gimple_assign_rhs1_ptr (stmt) == expr_p)
3106 tree tmp = make_rename_temp
3107 (TREE_TYPE (gimple_assign_lhs (stmt)), "SR");
3108 gimple_seq newseq = sra_build_assignment (tmp, REPLDUP (elt->replacement));
3110 sra_insert_before (gsi, newseq);
3114 update_stmt_if_modified (stmt);
3115 *expr_p = REPLDUP (replacement);
3118 else if (use_all && is_output
3119 && is_gimple_assign (stmt)
3120 && TREE_CODE (bfexpr
3121 = gimple_assign_lhs (stmt)) == BIT_FIELD_REF
3122 && &TREE_OPERAND (bfexpr, 0) == expr_p
3123 && INTEGRAL_TYPE_P (TREE_TYPE (bfexpr))
3124 && TREE_CODE (TREE_TYPE (*expr_p)) == RECORD_TYPE)
3126 gimple_seq seq_before = NULL;
3127 gimple_seq seq_after = NULL;
3128 tree blen = fold_convert (bitsizetype, TREE_OPERAND (bfexpr, 1));
3129 tree bpos = fold_convert (bitsizetype, TREE_OPERAND (bfexpr, 2));
3130 bool update = false;
3132 if (!elt->use_block_copy)
3134 tree type = TREE_TYPE (bfexpr);
3135 tree var = make_rename_temp (type, "SR"), tmp, vpos;
3138 gimple_assign_set_lhs (stmt, var);
3141 if (!TYPE_UNSIGNED (type))
3143 type = unsigned_type_for (type);
3144 tmp = make_rename_temp (type, "SR");
3145 st = gimple_build_assign (tmp, fold_convert (type, var));
3146 gimple_seq_add_stmt (&seq_after, st);
3150 /* If VAR is wider than BLEN bits, it is padded at the
3151 most-significant end. We want to set VPOS such that
3152 <BIT_FIELD_REF VAR BLEN VPOS> would refer to the
3153 least-significant BLEN bits of VAR. */
3154 if (BYTES_BIG_ENDIAN)
3155 vpos = size_binop (MINUS_EXPR, TYPE_SIZE (type), blen);
3157 vpos = bitsize_int (0);
3158 sra_explode_bitfield_assignment
3159 (var, vpos, false, &seq_after, blen, bpos, elt);
3162 sra_sync_for_bitfield_assignment
3163 (&seq_before, &seq_after, blen, bpos, elt);
3167 mark_all_v_defs_seq (seq_before);
3168 sra_insert_before (gsi, seq_before);
3172 mark_all_v_defs_seq (seq_after);
3173 sra_insert_after (gsi, seq_after);
3179 else if (use_all && !is_output
3180 && is_gimple_assign (stmt)
3181 && TREE_CODE (bfexpr
3182 = gimple_assign_rhs1 (stmt)) == BIT_FIELD_REF
3183 && &TREE_OPERAND (gimple_assign_rhs1 (stmt), 0) == expr_p
3184 && INTEGRAL_TYPE_P (TREE_TYPE (bfexpr))
3185 && TREE_CODE (TREE_TYPE (*expr_p)) == RECORD_TYPE)
3187 gimple_seq seq = NULL;
3188 tree blen = fold_convert (bitsizetype, TREE_OPERAND (bfexpr, 1));
3189 tree bpos = fold_convert (bitsizetype, TREE_OPERAND (bfexpr, 2));
3190 bool update = false;
3192 if (!elt->use_block_copy)
3194 tree type = TREE_TYPE (bfexpr);
3195 tree var = make_rename_temp (type, "SR"), tmp, vpos;
3198 gimple_assign_set_rhs1 (stmt, var);
3201 if (!TYPE_UNSIGNED (type))
3203 type = unsigned_type_for (type);
3204 tmp = make_rename_temp (type, "SR");
3205 st = gimple_build_assign (var,
3206 fold_convert (TREE_TYPE (var), tmp));
3210 gimple_seq_add_stmt (&seq,
3212 (var, build_int_cst_wide (type, 0, 0)));
3214 /* If VAR is wider than BLEN bits, it is padded at the
3215 most-significant end. We want to set VPOS such that
3216 <BIT_FIELD_REF VAR BLEN VPOS> would refer to the
3217 least-significant BLEN bits of VAR. */
3218 if (BYTES_BIG_ENDIAN)
3219 vpos = size_binop (MINUS_EXPR, TYPE_SIZE (type), blen);
3221 vpos = bitsize_int (0);
3222 sra_explode_bitfield_assignment
3223 (var, vpos, true, &seq, blen, bpos, elt);
3226 gimple_seq_add_stmt (&seq, st);
3229 sra_sync_for_bitfield_assignment
3230 (&seq, NULL, blen, bpos, elt);
3234 mark_all_v_defs_seq (seq);
3235 sra_insert_before (gsi, seq);
3243 gimple_seq seq = NULL;
3245 /* Otherwise we need some copies. If ELT is being read, then we
3246 want to store all (modified) sub-elements back into the
3247 structure before the reference takes place. If ELT is being
3248 written, then we want to load the changed values back into
3249 our shadow variables. */
3250 /* ??? We don't check modified for reads; we just always write all of
3251 the values. We should be able to record the SSA number of the VOP
3252 for which the values were last read. If that number matches the
3253 SSA number of the VOP in the current statement, then we needn't
3254 emit an assignment. This would also eliminate double writes when
3255 a structure is passed as more than one argument to a function call.
3256 This optimization would be most effective if sra_walk_function
3257 processed the blocks in dominator order. */
3259 generate_copy_inout (elt, is_output, generate_element_ref (elt), &seq);
3262 mark_all_v_defs_seq (seq);
3264 sra_insert_after (gsi, seq);
3267 sra_insert_before (gsi, seq);
3269 mark_no_warning (elt);
3274 /* Scalarize a COPY. To recap, this is an assignment statement between
3275 two scalarizable references, LHS_ELT and RHS_ELT. */
3278 scalarize_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
3279 gimple_stmt_iterator *gsi)
3284 if (lhs_elt->replacement && rhs_elt->replacement)
3286 /* If we have two scalar operands, modify the existing statement. */
3287 stmt = gsi_stmt (*gsi);
3289 /* See the commentary in sra_walk_function concerning
3290 RETURN_EXPR, and why we should never see one here. */
3291 gcc_assert (is_gimple_assign (stmt));
3292 gcc_assert (gimple_assign_copy_p (stmt));
3295 gimple_assign_set_lhs (stmt, lhs_elt->replacement);
3296 gimple_assign_set_rhs1 (stmt, REPLDUP (rhs_elt->replacement));
3299 else if (lhs_elt->use_block_copy || rhs_elt->use_block_copy)
3301 /* If either side requires a block copy, then sync the RHS back
3302 to the original structure, leave the original assignment
3303 statement (which will perform the block copy), then load the
3304 LHS values out of its now-updated original structure. */
3305 /* ??? Could perform a modified pair-wise element copy. That
3306 would at least allow those elements that are instantiated in
3307 both structures to be optimized well. */
3310 generate_copy_inout (rhs_elt, false,
3311 generate_element_ref (rhs_elt), &seq);
3314 mark_all_v_defs_seq (seq);
3315 sra_insert_before (gsi, seq);
3319 generate_copy_inout (lhs_elt, true,
3320 generate_element_ref (lhs_elt), &seq);
3323 mark_all_v_defs_seq (seq);
3324 sra_insert_after (gsi, seq);
3329 /* Otherwise both sides must be fully instantiated. In which
3330 case perform pair-wise element assignments and replace the
3331 original block copy statement. */
3333 stmt = gsi_stmt (*gsi);
3334 update_stmt_if_modified (stmt);
3337 generate_element_copy (lhs_elt, rhs_elt, &seq);
3339 mark_all_v_defs_seq (seq);
3340 sra_replace (gsi, seq);
3344 /* Scalarize an INIT. To recap, this is an assignment to a scalarizable
3345 reference from some form of constructor: CONSTRUCTOR, COMPLEX_CST or
3346 COMPLEX_EXPR. If RHS is NULL, it should be treated as an empty CONSTRUCTOR. */
3350 scalarize_init (struct sra_elt *lhs_elt, tree rhs, gimple_stmt_iterator *gsi)
3353 gimple_seq seq = NULL, init_seq = NULL;
3355 /* Generate initialization statements for all members extant in the RHS. */
3358 /* Unshare the expression just in case this is from a decl's initial. */
3359 rhs = unshare_expr (rhs);
3360 result = generate_element_init (lhs_elt, rhs, &init_seq);
3365 /* If we failed to convert the entire initializer, then we must
3366 leave the structure assignment in place and must load values
3367 from the structure into the slots for which we did not find
3368 constants. The easiest way to do this is to generate a complete
3369 copy-out, and then follow that with the constant assignments
3370 that we were able to build. DCE will clean things up. */
3371 gimple_seq seq0 = NULL;
3372 generate_copy_inout (lhs_elt, true, generate_element_ref (lhs_elt),
3374 gimple_seq_add_seq (&seq0, seq);
3379 /* CONSTRUCTOR is defined such that any member not mentioned is assigned
3380 a zero value. Initialize the rest of the instantiated elements. */
3381 generate_element_zero (lhs_elt, &seq);
3382 gimple_seq_add_seq (&seq, init_seq);
3385 if (lhs_elt->use_block_copy || !result)
3387 /* Since LHS is not fully instantiated, we must leave the structure
3388 assignment in place. Treating this case differently from a USE
3389 exposes constants to later optimizations. */
3392 mark_all_v_defs_seq (seq);
3393 sra_insert_after (gsi, seq);
3398 /* The LHS is fully instantiated. The list of initializations
3399 replaces the original structure assignment. */
3401 update_stmt_if_modified (gsi_stmt (*gsi));
3402 mark_all_v_defs_seq (seq);
3403 sra_replace (gsi, seq);
3407 /* A subroutine of scalarize_ldst called via walk_tree. Set TREE_THIS_NOTRAP
3408 on all INDIRECT_REFs. */
3411 mark_notrap (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3415 if (TREE_CODE (t) == INDIRECT_REF)
3417 TREE_THIS_NOTRAP (t) = 1;
3420 else if (IS_TYPE_OR_DECL_P (t))
3426 /* Scalarize a LDST. To recap, this is an assignment between one scalarizable
3427 reference ELT and one non-scalarizable reference OTHER. IS_OUTPUT is true
3428 if ELT is on the left-hand side. */
3431 scalarize_ldst (struct sra_elt *elt, tree other,
3432 gimple_stmt_iterator *gsi, bool is_output)
3434 /* Shouldn't have gotten called for a scalar. */
3435 gcc_assert (!elt->replacement);
3437 if (elt->use_block_copy)
3439 /* Since ELT is not fully instantiated, we have to leave the
3440 block copy in place. Treat this as a USE. */
3441 scalarize_use (elt, NULL, gsi, is_output, false);
3445 /* The interesting case is when ELT is fully instantiated. In this
3446 case we can have each element stored/loaded directly to/from the
3447 corresponding slot in OTHER. This avoids a block copy. */
3449 gimple_seq seq = NULL;
3450 gimple stmt = gsi_stmt (*gsi);
3452 update_stmt_if_modified (stmt);
3453 generate_copy_inout (elt, is_output, other, &seq);
3455 mark_all_v_defs_seq (seq);
3457 /* Preserve EH semantics. */
3458 if (stmt_ends_bb_p (stmt))
3460 gimple_stmt_iterator si;
3462 gimple_seq blist = NULL;
3463 bool thr = stmt_could_throw_p (stmt);
3465 /* If the last statement of this BB created an EH edge
3466 before scalarization, we have to locate the first
3467 statement that can throw in the new statement list and
3468 use that as the last statement of this BB, such that EH
3469 semantics is preserved. All statements up to this one
3470 are added to the same BB. All other statements in the
3471 list will be added to normal outgoing edges of the same
3472 BB. If they access any memory, it's the same memory, so
3473 we can assume they won't throw. */
3474 si = gsi_start (seq);
3475 for (first = gsi_stmt (si);
3476 thr && !gsi_end_p (si) && !stmt_could_throw_p (first);
3477 first = gsi_stmt (si))
3479 gsi_remove (&si, false);
3480 gimple_seq_add_stmt (&blist, first);
3483 /* Extract the first remaining statement from SEQ; this is
3484 the EH statement, if there is one. */
3485 gsi_remove (&si, false);
3488 sra_insert_before (gsi, blist);
3490 /* Replace the old statement with this new representative. */
3491 gsi_replace (gsi, first, true);
3493 if (!gsi_end_p (si))
3495 /* If any reference would trap, then they all would. And more
3496 to the point, the first would. Therefore none of the rest
3497 will trap since the first didn't. Indicate this by
3498 iterating over the remaining statements and setting
3499 TREE_THIS_NOTRAP on all INDIRECT_REFs. */
3502 walk_gimple_stmt (&si, NULL, mark_notrap, NULL);
3505 while (!gsi_end_p (si));
3507 insert_edge_copies_seq (seq, gsi_bb (*gsi));
3511 sra_replace (gsi, seq);
3515 /* Generate initializations for all scalarizable parameters. */
3518 scalarize_parms (void)
3520 gimple_seq seq = NULL;
3524 EXECUTE_IF_SET_IN_BITMAP (needs_copy_in, 0, i, bi)
3526 tree var = referenced_var (i);
3527 struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
3528 generate_copy_inout (elt, true, var, &seq);
3533 insert_edge_copies_seq (seq, ENTRY_BLOCK_PTR);
3534 mark_all_v_defs_seq (seq);
3538 /* Entry point to phase 4. Update the function to match replacements. */
3541 scalarize_function (void)
3543 static const struct sra_walk_fns fns = {
3544 scalarize_use, scalarize_copy, scalarize_init, scalarize_ldst, false
3547 sra_walk_function (&fns);
3549 gsi_commit_edge_inserts ();
3553 /* Debug helper function. Print ELT in a nice human-readable format. */
3556 dump_sra_elt_name (FILE *f, struct sra_elt *elt)
3558 if (elt->parent && TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
3560 fputs (elt->element == integer_zero_node ? "__real__ " : "__imag__ ", f);
3561 dump_sra_elt_name (f, elt->parent);
3566 dump_sra_elt_name (f, elt->parent);
3567 if (DECL_P (elt->element))
3569 if (TREE_CODE (elt->element) == FIELD_DECL)
3571 print_generic_expr (f, elt->element, dump_flags);
3573 else if (TREE_CODE (elt->element) == BIT_FIELD_REF)
3574 fprintf (f, "$B" HOST_WIDE_INT_PRINT_DEC "F" HOST_WIDE_INT_PRINT_DEC,
3575 tree_low_cst (TREE_OPERAND (elt->element, 2), 1),
3576 tree_low_cst (TREE_OPERAND (elt->element, 1), 1));
3577 else if (TREE_CODE (elt->element) == RANGE_EXPR)
3578 fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC ".." HOST_WIDE_INT_PRINT_DEC "]",
3579 TREE_INT_CST_LOW (TREE_OPERAND (elt->element, 0)),
3580 TREE_INT_CST_LOW (TREE_OPERAND (elt->element, 1)));
3582 fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC "]",
3583 TREE_INT_CST_LOW (elt->element));
3587 /* Likewise, but callable from the debugger. */
3590 debug_sra_elt_name (struct sra_elt *elt)
3592 dump_sra_elt_name (stderr, elt);
3593 fputc ('\n', stderr);
3597 sra_init_cache (void)
3599 if (sra_type_decomp_cache)
3602 sra_type_decomp_cache = BITMAP_ALLOC (NULL);
3603 sra_type_inst_cache = BITMAP_ALLOC (NULL);
3607 /* Main entry point. */
3612 /* Initialize local variables. */
3613 gcc_obstack_init (&sra_obstack);
3614 sra_candidates = BITMAP_ALLOC (NULL);
3615 needs_copy_in = BITMAP_ALLOC (NULL);
3617 sra_map = htab_create (101, sra_elt_hash, sra_elt_eq, NULL);
3619 /* Scan. If we find anything, instantiate and scalarize. */
3620 if (find_candidates_for_sra ())
3623 decide_instantiations ();
3624 scalarize_function ();
3627 /* Free allocated memory. */
3628 htab_delete (sra_map);
3630 BITMAP_FREE (sra_candidates);
3631 BITMAP_FREE (needs_copy_in);
3632 BITMAP_FREE (sra_type_decomp_cache);
3633 BITMAP_FREE (sra_type_inst_cache);
3634 obstack_free (&sra_obstack, NULL);
3639 tree_sra_early (void)
3653 return flag_tree_sra != 0;
3656 struct gimple_opt_pass pass_sra_early =
3661 gate_sra, /* gate */
3662 tree_sra_early, /* execute */
3665 0, /* static_pass_number */
3666 TV_TREE_SRA, /* tv_id */
3667 PROP_cfg | PROP_ssa, /* properties_required */
3668 0, /* properties_provided */
3669 0, /* properties_destroyed */
3670 0, /* todo_flags_start */
3674 | TODO_verify_ssa /* todo_flags_finish */
3678 struct gimple_opt_pass pass_sra =
3683 gate_sra, /* gate */
3684 tree_sra, /* execute */
3687 0, /* static_pass_number */
3688 TV_TREE_SRA, /* tv_id */
3689 PROP_cfg | PROP_ssa, /* properties_required */
3690 0, /* properties_provided */
3691 0, /* properties_destroyed */
3692 TODO_update_address_taken, /* todo_flags_start */
3696 | TODO_verify_ssa /* todo_flags_finish */