/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"

/* These RTL headers are needed for basic-block.h.  */
#include "hard-reg-set.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "langhooks.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-pass.h"

/* expr.h is needed for MOVE_RATIO.  */
#include "expr.h"
/* The objective of this pass is to replace a non-addressable aggregate with a
   set of independent variables.  Most of the time, all of these variables
   will be scalars.  But a secondary objective is to break up larger
   aggregates into smaller aggregates.  In the process we may find that some
   bits of the larger aggregate can be deleted as unreferenced.

   This substitution is done globally.  More localized substitutions would
   be the purview of a load-store motion pass.

   The optimization proceeds in phases:

     (1) Identify variables that have types that are candidates for
	 decomposition.

     (2) Scan the function looking for the ways these variables are used.
	 In particular we're interested in the number of times a variable
	 (or member) is needed as a complete unit, and the number of times
	 a variable (or member) is copied.

     (3) Based on the usage profile, instantiate substitution variables.

     (4) Scan the function making replacements.  */
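/* As a purely illustrative sketch (not part of the original sources),
   given

	struct pair { int x; int y; } p;
	p.x = 1;
	p.y = 2;
	return p.x + p.y;

   the pass replaces the aggregate P with independent scalars, roughly

	int p$x = 1;
	int p$y = 2;
	return p$x + p$y;

   after which P itself is unreferenced and can be deleted.  */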
/* True if this is the "early" pass, before inlining.  */
static bool early_sra;

/* The set of todo flags to return from tree_sra.  */
static unsigned int todoflags;

/* The set of aggregate variables that are candidates for scalarization.  */
static bitmap sra_candidates;

/* Set of scalarizable PARM_DECLs that need copy-in operations at the
   beginning of the function.  */
static bitmap needs_copy_in;

/* Sets of bit pairs that cache type decomposition and instantiation.  */
static bitmap sra_type_decomp_cache;
static bitmap sra_type_inst_cache;
/* One of these structures is created for each candidate aggregate and
   each (accessed) member or group of members of such an aggregate.  */
struct sra_elt
{
  /* A tree of the elements.  Used when we want to traverse everything.  */
  struct sra_elt *parent;
  struct sra_elt *groups;
  struct sra_elt *children;
  struct sra_elt *sibling;

  /* If this element is a root, then this is the VAR_DECL.  If this is
     a sub-element, this is some token used to identify the reference.
     In the case of COMPONENT_REF, this is the FIELD_DECL.  In the case
     of an ARRAY_REF, this is the (constant) index.  In the case of an
     ARRAY_RANGE_REF, this is the (constant) RANGE_EXPR.  In the case
     of a complex number, this is a zero or one.  */
  tree element;

  /* The type of the element.  */
  tree type;

  /* A VAR_DECL, for any sub-element we've decided to replace.  */
  tree replacement;

  /* The number of times the element is referenced as a whole.  I.e.
     given "a.b.c", this would be incremented for C, but not for A or B.  */
  unsigned int n_uses;

  /* The number of times the element is copied to or from another
     scalarizable element.  */
  unsigned int n_copies;

  /* True if TYPE is scalar.  */
  bool is_scalar;

  /* True if this element is a group of members of its parent.  */
  bool is_group;

  /* True if we saw something about this element that prevents scalarization,
     such as non-constant indexing.  */
  bool cannot_scalarize;

  /* True if we've decided that structure-to-structure assignment
     should happen via memcpy and not per-element.  */
  bool use_block_copy;

  /* True if everything under this element has been marked TREE_NO_WARNING.  */
  bool all_no_warning;

  /* A flag for use with/after random access traversals.  */
  bool visited;

  /* True if there is a BIT_FIELD_REF on the LHS with a vector.  */
  bool is_vector_lhs;

  /* 1 if the element is a field that is part of a block, 2 if the field
     is the block itself, 0 if it's neither.  */
  char in_bitfld_block;
};
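/* Illustration (hypothetical): for a declaration such as

	struct { float r; float i; } z[2];

   the element tree has the VAR_DECL Z as its root, the constant array
   indices 0 and 1 as children, and the FIELD_DECLs R and I as the
   children of each index.  */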
#define IS_ELEMENT_FOR_GROUP(ELEMENT) (TREE_CODE (ELEMENT) == RANGE_EXPR)

#define FOR_EACH_ACTUAL_CHILD(CHILD, ELT)			\
  for ((CHILD) = (ELT)->is_group				\
		 ? next_child_for_group (NULL, (ELT))		\
		 : (ELT)->children;				\
       (CHILD);							\
       (CHILD) = (ELT)->is_group				\
		 ? next_child_for_group ((CHILD), (ELT))	\
		 : (CHILD)->sibling)

/* Helper function for above macro.  Return next child in group.  */
static struct sra_elt *
next_child_for_group (struct sra_elt *child, struct sra_elt *group)
{
  gcc_assert (group->is_group);

  /* Find the next child in the parent.  */
  if (child)
    child = child->sibling;
  else
    child = group->parent->children;

  /* Skip siblings that do not belong to the group.  */
  while (child)
    {
      tree g_elt = group->element;
      if (TREE_CODE (g_elt) == RANGE_EXPR)
	{
	  if (!tree_int_cst_lt (child->element, TREE_OPERAND (g_elt, 0))
	      && !tree_int_cst_lt (TREE_OPERAND (g_elt, 1), child->element))
	    break;
	}
      else
	gcc_unreachable ();

      child = child->sibling;
    }

  return child;
}
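/* Illustration: for a group element whose RANGE_EXPR covers indices
   [2, 4] of an array, FOR_EACH_ACTUAL_CHILD visits exactly those of
   the parent's children whose constant index lies between 2 and 4.  */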
/* Random access to the child of a parent is performed by hashing.
   This prevents quadratic behavior, and allows SRA to function
   reasonably on larger records.  */
static htab_t sra_map;

/* All structures are allocated out of the following obstack.  */
static struct obstack sra_obstack;

/* Debugging functions.  */
static void dump_sra_elt_name (FILE *, struct sra_elt *);
extern void debug_sra_elt_name (struct sra_elt *);

/* Forward declarations.  */
static tree generate_element_ref (struct sra_elt *);
static gimple_seq sra_build_assignment (tree dst, tree src);
static void mark_all_v_defs_seq (gimple_seq);
static void mark_all_v_defs_stmt (gimple);
/* Return true if DECL is an SRA candidate.  */

static bool
is_sra_candidate_decl (tree decl)
{
  return DECL_P (decl) && bitmap_bit_p (sra_candidates, DECL_UID (decl));
}

/* Return true if TYPE is a scalar type.  */

static bool
is_sra_scalar_type (tree type)
{
  enum tree_code code = TREE_CODE (type);
  return (code == INTEGER_TYPE || code == REAL_TYPE || code == VECTOR_TYPE
	  || code == FIXED_POINT_TYPE
	  || code == ENUMERAL_TYPE || code == BOOLEAN_TYPE
	  || code == POINTER_TYPE || code == OFFSET_TYPE
	  || code == REFERENCE_TYPE);
}
/* Return true if TYPE can be decomposed into a set of independent variables.

   Note that this doesn't imply that all elements of TYPE can be
   instantiated, just that if we decide to break up the type into
   separate pieces that it can be done.  */

static bool
sra_type_can_be_decomposed_p (tree type)
{
  unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
  tree t;

  /* Avoid searching the same type twice.  */
  if (bitmap_bit_p (sra_type_decomp_cache, cache+0))
    return true;
  if (bitmap_bit_p (sra_type_decomp_cache, cache+1))
    return false;

  /* The type must have a definite nonzero size.  */
  if (TYPE_SIZE (type) == NULL || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
      || integer_zerop (TYPE_SIZE (type)))
    goto fail;

  /* The type must be a non-union aggregate.  */
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	bool saw_one_field = false;

	for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
	  if (TREE_CODE (t) == FIELD_DECL)
	    {
	      /* Reject incorrectly represented bit fields.  */
	      if (DECL_BIT_FIELD (t)
		  && INTEGRAL_TYPE_P (TREE_TYPE (t))
		  && (tree_low_cst (DECL_SIZE (t), 1)
		      != TYPE_PRECISION (TREE_TYPE (t))))
		goto fail;

	      saw_one_field = true;
	    }

	/* Record types must have at least one field.  */
	if (!saw_one_field)
	  goto fail;
      }
      break;

    case ARRAY_TYPE:
      /* Array types must have a fixed lower and upper bound.  */
      t = TYPE_DOMAIN (type);
      if (t == NULL)
	goto fail;
      if (TYPE_MIN_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MIN_VALUE (t)))
	goto fail;
      if (TYPE_MAX_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MAX_VALUE (t)))
	goto fail;
      break;

    case COMPLEX_TYPE:
      break;

    default:
      goto fail;
    }

  bitmap_set_bit (sra_type_decomp_cache, cache+0);
  return true;

 fail:
  bitmap_set_bit (sra_type_decomp_cache, cache+1);
  return false;
}
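/* Illustration: "struct { int i; float f; }" and "int[4]" can be
   decomposed, while a union, a zero-sized aggregate, or an array with
   variable bounds cannot; either answer is remembered in the pair of
   cache bits allocated per type.  */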
/* Returns true if the TYPE is one of the available va_list types.
   Otherwise it returns false.
   Note, that for multiple calling conventions there can be more
   than just one va_list type present.  */

static bool
is_va_list_type (tree type)
{
  tree h;

  if (type == NULL_TREE)
    return false;
  h = targetm.canonical_va_list_type (type);
  if (h == NULL_TREE)
    return false;
  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (h))
    return true;
  return false;
}
/* Return true if DECL can be decomposed into a set of independent
   (though not necessarily scalar) variables.  */

static bool
decl_can_be_decomposed_p (tree var)
{
  /* Early out for scalars.  */
  if (is_sra_scalar_type (TREE_TYPE (var)))
    return false;

  /* The variable must not be aliased.  */
  if (!is_gimple_non_addressable (var))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because it must live in memory\n");
	}
      return false;
    }

  /* The variable must not be volatile.  */
  if (TREE_THIS_VOLATILE (var))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because it is declared volatile\n");
	}
      return false;
    }

  /* We must be able to decompose the variable's type.  */
  if (!sra_type_can_be_decomposed_p (TREE_TYPE (var)))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because its type cannot be decomposed\n");
	}
      return false;
    }

  /* HACK: if we decompose a va_list_type_node before inlining, then we'll
     confuse tree-stdarg.c, and we won't be able to figure out which and
     how many arguments are accessed.  This really should be improved in
     tree-stdarg.c, as the decomposition is truly a win.  This could also
     be fixed if the stdarg pass ran early, but this can't be done until
     we have aliasing information early too.  See PR 30791.  */
  if (early_sra && is_va_list_type (TREE_TYPE (var)))
    return false;

  return true;
}
/* Return true if TYPE can be *completely* decomposed into scalars.  */

static bool
type_can_instantiate_all_elements (tree type)
{
  if (is_sra_scalar_type (type))
    return true;
  if (!sra_type_can_be_decomposed_p (type))
    return false;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
	tree f;

	if (bitmap_bit_p (sra_type_inst_cache, cache+0))
	  return true;
	if (bitmap_bit_p (sra_type_inst_cache, cache+1))
	  return false;

	for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      if (!type_can_instantiate_all_elements (TREE_TYPE (f)))
		{
		  bitmap_set_bit (sra_type_inst_cache, cache+1);
		  return false;
		}
	    }
	bitmap_set_bit (sra_type_inst_cache, cache+0);
	return true;
      }

    case ARRAY_TYPE:
      return type_can_instantiate_all_elements (TREE_TYPE (type));

    case COMPLEX_TYPE:
      return true;

    default:
      gcc_unreachable ();
    }
}
/* Test whether ELT or some sub-element cannot be scalarized.  */

static bool
can_completely_scalarize_p (struct sra_elt *elt)
{
  struct sra_elt *c;

  if (elt->cannot_scalarize)
    return false;

  for (c = elt->children; c; c = c->sibling)
    if (!can_completely_scalarize_p (c))
      return false;

  for (c = elt->groups; c; c = c->sibling)
    if (!can_completely_scalarize_p (c))
      return false;

  return true;
}
/* A simplified tree hashing algorithm that only handles the types of
   trees we expect to find in sra_elt->element.  */

static hashval_t
sra_hash_tree (tree t)
{
  hashval_t h;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      h = DECL_UID (t);
      break;

    case INTEGER_CST:
      h = TREE_INT_CST_LOW (t) ^ TREE_INT_CST_HIGH (t);
      break;

    case RANGE_EXPR:
      h = iterative_hash_expr (TREE_OPERAND (t, 0), 0);
      h = iterative_hash_expr (TREE_OPERAND (t, 1), h);
      break;

    case FIELD_DECL:
      /* We can have types that are compatible, but have different member
	 lists, so we can't hash fields by ID.  Use offsets instead.  */
      h = iterative_hash_expr (DECL_FIELD_OFFSET (t), 0);
      h = iterative_hash_expr (DECL_FIELD_BIT_OFFSET (t), h);
      break;

    case BIT_FIELD_REF:
      /* Don't take operand 0 into account, that's our parent.  */
      h = iterative_hash_expr (TREE_OPERAND (t, 1), 0);
      h = iterative_hash_expr (TREE_OPERAND (t, 2), h);
      break;

    default:
      gcc_unreachable ();
    }

  return h;
}
/* Hash function for type SRA_PAIR.  */

static hashval_t
sra_elt_hash (const void *x)
{
  const struct sra_elt *const e = (const struct sra_elt *) x;
  const struct sra_elt *p;
  hashval_t h;

  h = sra_hash_tree (e->element);

  /* Take into account everything except bitfield blocks back up the
     chain.  Given that chain lengths are rarely very long, this
     should be acceptable.  If we truly identify this as a performance
     problem, it should work to hash the pointer value
     "e->parent".  */
  for (p = e->parent; p ; p = p->parent)
    if (!p->in_bitfld_block)
      h = (h * 65521) ^ sra_hash_tree (p->element);

  return h;
}
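/* Illustration: the element for "a.b.c" hashes C's field offsets
   combined with the hashes of its ancestors B and A; ancestors that
   are bitfield blocks are skipped, so a field hashes the same whether
   or not it was placed in a synthesized block.  */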
/* Equality function for type SRA_PAIR.  */

static int
sra_elt_eq (const void *x, const void *y)
{
  const struct sra_elt *const a = (const struct sra_elt *) x;
  const struct sra_elt *const b = (const struct sra_elt *) y;
  tree ae, be;
  const struct sra_elt *ap = a->parent;
  const struct sra_elt *bp = b->parent;

  if (ap)
    while (ap->in_bitfld_block)
      ap = ap->parent;
  if (bp)
    while (bp->in_bitfld_block)
      bp = bp->parent;

  if (ap != bp)
    return false;

  ae = a->element;
  be = b->element;

  if (ae == be)
    return true;
  if (TREE_CODE (ae) != TREE_CODE (be))
    return false;

  switch (TREE_CODE (ae))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* These are all pointer unique.  */
      return false;

    case INTEGER_CST:
      /* Integers are not pointer unique, so compare their values.  */
      return tree_int_cst_equal (ae, be);

    case RANGE_EXPR:
      return
	tree_int_cst_equal (TREE_OPERAND (ae, 0), TREE_OPERAND (be, 0))
	&& tree_int_cst_equal (TREE_OPERAND (ae, 1), TREE_OPERAND (be, 1));

    case FIELD_DECL:
      /* Fields are unique within a record, but not between
	 compatible records.  */
      if (DECL_FIELD_CONTEXT (ae) == DECL_FIELD_CONTEXT (be))
	return false;
      return fields_compatible_p (ae, be);

    case BIT_FIELD_REF:
      return
	tree_int_cst_equal (TREE_OPERAND (ae, 1), TREE_OPERAND (be, 1))
	&& tree_int_cst_equal (TREE_OPERAND (ae, 2), TREE_OPERAND (be, 2));

    default:
      gcc_unreachable ();
    }
}
/* Create or return the SRA_ELT structure for CHILD in PARENT.  PARENT
   may be null, in which case CHILD must be a DECL.  */

static struct sra_elt *
lookup_element (struct sra_elt *parent, tree child, tree type,
		enum insert_option insert)
{
  struct sra_elt dummy;
  struct sra_elt **slot;
  struct sra_elt *elt;

  if (parent)
    dummy.parent = parent->is_group ? parent->parent : parent;
  else
    dummy.parent = NULL;
  dummy.element = child;

  slot = (struct sra_elt **) htab_find_slot (sra_map, &dummy, insert);
  if (!slot && insert == NO_INSERT)
    return NULL;

  elt = *slot;
  if (!elt && insert == INSERT)
    {
      *slot = elt = XOBNEW (&sra_obstack, struct sra_elt);
      memset (elt, 0, sizeof (*elt));

      elt->parent = parent;
      elt->element = child;
      elt->type = type;
      elt->is_scalar = is_sra_scalar_type (type);

      if (parent)
	{
	  if (IS_ELEMENT_FOR_GROUP (elt->element))
	    {
	      elt->is_group = true;
	      elt->sibling = parent->groups;
	      parent->groups = elt;
	    }
	  else
	    {
	      elt->sibling = parent->children;
	      parent->children = elt;
	    }
	}

      /* If this is a parameter, then if we want to scalarize, we have
	 one copy from the true function parameter.  Count it now.  */
      if (TREE_CODE (child) == PARM_DECL)
	{
	  elt->n_copies = 1;
	  bitmap_set_bit (needs_copy_in, DECL_UID (child));
	}
    }

  return elt;
}
/* Create or return the SRA_ELT structure for EXPR if the expression
   refers to a scalarizable variable.  */

static struct sra_elt *
maybe_lookup_element_for_expr (tree expr)
{
  struct sra_elt *elt;
  tree child;

  switch (TREE_CODE (expr))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      if (is_sra_candidate_decl (expr))
	return lookup_element (NULL, expr, TREE_TYPE (expr), INSERT);
      return NULL;

    case ARRAY_REF:
      /* We can't scalarize variable array indices.  */
      if (in_array_bounds_p (expr))
	child = TREE_OPERAND (expr, 1);
      else
	return NULL;
      break;

    case ARRAY_RANGE_REF:
      /* We can't scalarize variable array indices.  */
      if (range_in_array_bounds_p (expr))
	{
	  tree domain = TYPE_DOMAIN (TREE_TYPE (expr));
	  child = build2 (RANGE_EXPR, integer_type_node,
			  TYPE_MIN_VALUE (domain), TYPE_MAX_VALUE (domain));
	}
      else
	return NULL;
      break;

    case COMPONENT_REF:
      {
	tree type = TREE_TYPE (TREE_OPERAND (expr, 0));
	/* Don't look through unions.  */
	if (TREE_CODE (type) != RECORD_TYPE)
	  return NULL;
	/* Neither through variable-sized records.  */
	if (TYPE_SIZE (type) == NULL_TREE
	    || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	  return NULL;
	child = TREE_OPERAND (expr, 1);
      }
      break;

    case REALPART_EXPR:
      child = integer_zero_node;
      break;
    case IMAGPART_EXPR:
      child = integer_one_node;
      break;

    default:
      return NULL;
    }

  elt = maybe_lookup_element_for_expr (TREE_OPERAND (expr, 0));
  if (elt)
    return lookup_element (elt, child, TREE_TYPE (expr), INSERT);

  return NULL;
}
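/* Illustration: for the expression "a.b[2]" (all components
   scalarizable), this returns the element for index 2 under field B
   under the root element for A, creating any missing sra_elt nodes
   along the way.  */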
/* Functions to walk just enough of the tree to see all scalarizable
   references, and categorize them.  */

/* A set of callbacks for phases 2 and 4.  They'll be invoked for the
   various kinds of references seen.  In all cases, *GSI is an iterator
   pointing to the statement being processed.  */
struct sra_walk_fns
{
  /* Invoked when ELT is required as a unit.  Note that ELT might refer to
     a leaf node, in which case this is a simple scalar reference.  *EXPR_P
     points to the location of the expression.  IS_OUTPUT is true if this
     is a left-hand-side reference.  USE_ALL is true if we saw something we
     couldn't quite identify and had to force the use of the entire object.  */
  void (*use) (struct sra_elt *elt, tree *expr_p,
	       gimple_stmt_iterator *gsi, bool is_output, bool use_all);

  /* Invoked when we have a copy between two scalarizable references.  */
  void (*copy) (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
		gimple_stmt_iterator *gsi);

  /* Invoked when ELT is initialized from a constant.  VALUE may be NULL,
     in which case it should be treated as an empty CONSTRUCTOR.  */
  void (*init) (struct sra_elt *elt, tree value, gimple_stmt_iterator *gsi);

  /* Invoked when we have a copy between one scalarizable reference ELT
     and one non-scalarizable reference OTHER without side-effects.
     IS_OUTPUT is true if ELT is on the left-hand side.  */
  void (*ldst) (struct sra_elt *elt, tree other,
		gimple_stmt_iterator *gsi, bool is_output);

  /* True during phase 2, false during phase 4.  */
  /* ??? This is a hack.  */
  bool initial_scan;
};
#ifdef ENABLE_CHECKING
/* Invoked via walk_tree, if *TP contains a candidate decl, return it.  */

static tree
sra_find_candidate_decl (tree *tp, int *walk_subtrees,
			 void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
    {
      *walk_subtrees = 0;
      if (is_sra_candidate_decl (t))
	return t;
    }

  return NULL_TREE;
}
#endif
/* Walk most expressions looking for a scalarizable aggregate.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_expr (tree *expr_p, gimple_stmt_iterator *gsi, bool is_output,
	       const struct sra_walk_fns *fns)
{
  tree expr = *expr_p;
  tree inner = expr;
  bool disable_scalarization = false;
  bool use_all_p = false;

  /* We're looking to collect a reference expression between EXPR and INNER,
     such that INNER is a scalarizable decl and all other nodes through EXPR
     are references that we can scalarize.  If we come across something that
     we can't scalarize, we reset EXPR.  This has the effect of making it
     appear that we're referring to the larger expression as a whole.  */

  while (1)
    switch (TREE_CODE (inner))
      {
      case VAR_DECL:
      case PARM_DECL:
      case RESULT_DECL:
	/* If there is a scalarizable decl at the bottom, then process it.  */
	if (is_sra_candidate_decl (inner))
	  {
	    struct sra_elt *elt = maybe_lookup_element_for_expr (expr);
	    if (disable_scalarization)
	      elt->cannot_scalarize = true;
	    else
	      fns->use (elt, expr_p, gsi, is_output, use_all_p);
	  }
	return;

      case ARRAY_REF:
	/* Non-constant index means any member may be accessed.  Prevent the
	   expression from being scalarized.  If we were to treat this as a
	   reference to the whole array, we can wind up with a single dynamic
	   index reference inside a loop being overridden by several constant
	   index references during loop setup.  It's possible that this could
	   be avoided by using dynamic usage counts based on BB trip counts
	   (based on loop analysis or profiling), but that hardly seems worth
	   the effort.  */
	/* ??? Hack.  Figure out how to push this into the scan routines
	   without duplicating too much code.  */
	if (!in_array_bounds_p (inner))
	  {
	    disable_scalarization = true;
	    goto use_all;
	  }
	/* ??? Are we assured that non-constant bounds and stride will have
	   the same value everywhere?  I don't think Fortran will...  */
	if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
	  goto use_all;
	inner = TREE_OPERAND (inner, 0);
	break;

      case ARRAY_RANGE_REF:
	if (!range_in_array_bounds_p (inner))
	  {
	    disable_scalarization = true;
	    goto use_all;
	  }
	/* ??? See above regarding non-constant bounds and stride.  */
	if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
	  goto use_all;
	inner = TREE_OPERAND (inner, 0);
	break;

      case COMPONENT_REF:
	{
	  tree type = TREE_TYPE (TREE_OPERAND (inner, 0));
	  /* Don't look through unions.  */
	  if (TREE_CODE (type) != RECORD_TYPE)
	    goto use_all;
	  /* Neither through variable-sized records.  */
	  if (TYPE_SIZE (type) == NULL_TREE
	      || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	    goto use_all;
	  inner = TREE_OPERAND (inner, 0);
	}
	break;

      case REALPART_EXPR:
      case IMAGPART_EXPR:
	inner = TREE_OPERAND (inner, 0);
	break;

      case BIT_FIELD_REF:
	/* A bit field reference to a specific vector is scalarized, but ones
	   used as inputs need to be marked as used on the left-hand side so
	   that when we scalarize the vector, we can mark that variable as
	   not renamable.  */
	if (is_output
	    && TREE_CODE (TREE_TYPE (TREE_OPERAND (inner, 0))) == VECTOR_TYPE)
	  {
	    struct sra_elt *elt
	      = maybe_lookup_element_for_expr (TREE_OPERAND (inner, 0));
	    if (elt)
	      elt->is_vector_lhs = true;
	  }

	/* A bit field reference (access to *multiple* fields simultaneously)
	   is not currently scalarized.  Consider this an access to the full
	   outer element, to which walk_tree will bring us next.  */
	goto use_all;

      CASE_CONVERT:
	/* Similarly, a nop explicitly wants to look at an object in a
	   type other than the one we've scalarized.  */
	goto use_all;

      case VIEW_CONVERT_EXPR:
	/* Likewise for a view conversion, but with an additional twist:
	   it can be on the LHS and, in this case, an access to the full
	   outer element would mean a killing def.  So we need to punt
	   if we haven't already a full access to the current element,
	   because we cannot pretend to have a killing def if we only
	   have a partial access at some level.  */
	if (is_output && !use_all_p && inner != expr)
	  disable_scalarization = true;
	goto use_all;

      case WITH_SIZE_EXPR:
	/* This is a transparent wrapper.  The entire inner expression really
	   is being used.  */
	goto use_all;

      use_all:
	expr_p = &TREE_OPERAND (inner, 0);
	inner = expr = *expr_p;
	use_all_p = true;
	break;

      default:
#ifdef ENABLE_CHECKING
	/* Validate that we're not missing any references.  */
	gcc_assert (!walk_tree (&inner, sra_find_candidate_decl, NULL, NULL));
#endif
	return;
      }
}
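/* Illustration: walking "p.f.g" where F's record type is decomposable
   records a use of the element for p.f.g.  If instead G were reached
   through a union, EXPR would be reset at that point and the walk
   would record a USE of the enclosing subobject as a whole, with
   USE_ALL true.  */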
/* Walk the arguments of a GIMPLE_CALL looking for scalarizable aggregates.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_gimple_call (gimple stmt, gimple_stmt_iterator *gsi,
		      const struct sra_walk_fns *fns)
{
  int i;
  int nargs = gimple_call_num_args (stmt);

  for (i = 0; i < nargs; i++)
    sra_walk_expr (gimple_call_arg_ptr (stmt, i), gsi, false, fns);

  if (gimple_call_lhs (stmt))
    sra_walk_expr (gimple_call_lhs_ptr (stmt), gsi, true, fns);
}

/* Walk the inputs and outputs of a GIMPLE_ASM looking for scalarizable
   aggregates.  If we find one, invoke FNS->USE.  */

static void
sra_walk_gimple_asm (gimple stmt, gimple_stmt_iterator *gsi,
		     const struct sra_walk_fns *fns)
{
  size_t i;
  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    sra_walk_expr (&TREE_VALUE (gimple_asm_input_op (stmt, i)), gsi, false, fns);
  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
    sra_walk_expr (&TREE_VALUE (gimple_asm_output_op (stmt, i)), gsi, true, fns);
}
/* Walk a GIMPLE_ASSIGN and categorize the assignment appropriately.  */

static void
sra_walk_gimple_assign (gimple stmt, gimple_stmt_iterator *gsi,
			const struct sra_walk_fns *fns)
{
  struct sra_elt *lhs_elt = NULL, *rhs_elt = NULL;
  tree lhs, rhs;

  /* If there is more than 1 element on the RHS, only walk the LHS.  */
  if (!gimple_assign_single_p (stmt))
    {
      sra_walk_expr (gimple_assign_lhs_ptr (stmt), gsi, true, fns);
      return;
    }

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  lhs_elt = maybe_lookup_element_for_expr (lhs);
  rhs_elt = maybe_lookup_element_for_expr (rhs);

  /* If both sides are scalarizable, this is a COPY operation.  */
  if (lhs_elt && rhs_elt)
    {
      fns->copy (lhs_elt, rhs_elt, gsi);
      return;
    }

  /* If the RHS is scalarizable, handle it.  There are only two cases.  */
  if (rhs_elt)
    {
      if (!rhs_elt->is_scalar && !TREE_SIDE_EFFECTS (lhs))
	fns->ldst (rhs_elt, lhs, gsi, false);
      else
	fns->use (rhs_elt, gimple_assign_rhs1_ptr (stmt), gsi, false, false);
    }

  /* If it isn't scalarizable, there may be scalarizable variables within, so
     check for a call or else walk the RHS to see if we need to do any
     copy-in operations.  We need to do it before the LHS is scalarized so
     that the statements get inserted in the proper place, before any
     copy-out operations.  */
  else
    sra_walk_expr (gimple_assign_rhs1_ptr (stmt), gsi, false, fns);

  /* Likewise, handle the LHS being scalarizable.  We have cases similar
     to those above, but also want to handle RHS being constant.  */
  if (lhs_elt)
    {
      /* If this is an assignment from a constant, or constructor, then
	 we have access to all of the elements individually.  Invoke INIT.  */
      if (TREE_CODE (rhs) == COMPLEX_EXPR
	  || TREE_CODE (rhs) == COMPLEX_CST
	  || TREE_CODE (rhs) == CONSTRUCTOR)
	fns->init (lhs_elt, rhs, gsi);

      /* If this is an assignment from read-only memory, treat this as if
	 we'd been passed the constructor directly.  Invoke INIT.  */
      else if (TREE_CODE (rhs) == VAR_DECL
	       && TREE_STATIC (rhs)
	       && !DECL_EXTERNAL (rhs)
	       && TREE_READONLY (rhs)
	       && targetm.binds_local_p (rhs))
	fns->init (lhs_elt, DECL_INITIAL (rhs), gsi);

      /* If this is a copy from a non-scalarizable lvalue, invoke LDST.
	 The lvalue requirement prevents us from trying to directly scalarize
	 the result of a function call.  Which would result in trying to call
	 the function multiple times, and other evil things.  */
      else if (!lhs_elt->is_scalar
	       && !TREE_SIDE_EFFECTS (rhs) && is_gimple_addressable (rhs))
	fns->ldst (lhs_elt, rhs, gsi, true);

      /* Otherwise we're being used in some context that requires the
	 aggregate to be seen as a whole.  Invoke USE.  */
      else
	fns->use (lhs_elt, gimple_assign_lhs_ptr (stmt), gsi, true, false);
    }

  /* Similarly to above, LHS_ELT being null only means that the LHS as a
     whole is not a scalarizable reference.  There may be occurrences of
     scalarizable variables within, which implies a USE.  */
  else
    sra_walk_expr (gimple_assign_lhs_ptr (stmt), gsi, true, fns);
}
/* Entry point to the walk functions.  Search the entire function,
   invoking the callbacks in FNS on each of the references to
   scalarizable variables.  */

static void
sra_walk_function (const struct sra_walk_fns *fns)
{
  basic_block bb;
  gimple_stmt_iterator si, ni;

  /* ??? Phase 4 could derive some benefit from walking the function in
     dominator tree order.  */

  FOR_EACH_BB (bb)
    for (si = gsi_start_bb (bb); !gsi_end_p (si); si = ni)
      {
	gimple stmt;

	stmt = gsi_stmt (si);
	ni = si;
	gsi_next (&ni);

	/* If the statement has no virtual operands, then it doesn't
	   make any structure references that we care about.  */
	if (gimple_aliases_computed_p (cfun)
	    && ZERO_SSA_OPERANDS (stmt, (SSA_OP_VIRTUAL_DEFS | SSA_OP_VUSE)))
	  continue;

	switch (gimple_code (stmt))
	  {
	  case GIMPLE_RETURN:
	    /* If we have "return <retval>" then the return value is
	       already exposed for our pleasure.  Walk it as a USE to
	       force all the components back in place for the return.  */
	    if (gimple_return_retval (stmt) == NULL_TREE)
	      ;
	    else
	      sra_walk_expr (gimple_return_retval_ptr (stmt), &si, false,
			     fns);
	    break;

	  case GIMPLE_ASSIGN:
	    sra_walk_gimple_assign (stmt, &si, fns);
	    break;
	  case GIMPLE_CALL:
	    sra_walk_gimple_call (stmt, &si, fns);
	    break;
	  case GIMPLE_ASM:
	    sra_walk_gimple_asm (stmt, &si, fns);
	    break;

	  default:
	    break;
	  }
      }
}
/* Phase One: Scan all referenced variables in the program looking for
   structures that could be decomposed.  */

static bool
find_candidates_for_sra (void)
{
  bool any_set = false;
  tree var;
  referenced_var_iterator rvi;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (decl_can_be_decomposed_p (var))
	{
	  bitmap_set_bit (sra_candidates, DECL_UID (var));
	  any_set = true;
	}
    }

  return any_set;
}
/* Phase Two: Scan all references to scalarizable variables.  Count the
   number of times they are used or copied respectively.  */

/* Callbacks to fill in SRA_WALK_FNS.  Everything but USE is
   considered a copy, because we can decompose the reference such that
   the sub-elements needn't be contiguous.  */

static void
scan_use (struct sra_elt *elt, tree *expr_p ATTRIBUTE_UNUSED,
	  gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED,
	  bool is_output ATTRIBUTE_UNUSED, bool use_all ATTRIBUTE_UNUSED)
{
  elt->n_uses += 1;
}

static void
scan_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
	   gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED)
{
  lhs_elt->n_copies += 1;
  rhs_elt->n_copies += 1;
}

static void
scan_init (struct sra_elt *lhs_elt, tree rhs ATTRIBUTE_UNUSED,
	   gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED)
{
  lhs_elt->n_copies += 1;
}

static void
scan_ldst (struct sra_elt *elt, tree other ATTRIBUTE_UNUSED,
	   gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED,
	   bool is_output ATTRIBUTE_UNUSED)
{
  elt->n_copies += 1;
}
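/* Illustration: a statement "x = y" between two candidate records adds
   one to n_copies of both root elements, while an operand "x.f" in any
   other context adds one to n_uses of the element for x.f.  */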
/* Dump the values we collected during the scanning phase.  */

static void
scan_dump (struct sra_elt *elt)
{
  struct sra_elt *c;

  dump_sra_elt_name (dump_file, elt);
  fprintf (dump_file, ": n_uses=%u n_copies=%u\n", elt->n_uses, elt->n_copies);

  for (c = elt->children; c ; c = c->sibling)
    scan_dump (c);

  for (c = elt->groups; c ; c = c->sibling)
    scan_dump (c);
}

/* Entry point to phase 2.  Scan the entire function, building up
   scalarization data structures, recording copies and uses.  */

static void
scan_function (void)
{
  static const struct sra_walk_fns fns = {
    scan_use, scan_copy, scan_init, scan_ldst, true
  };

  sra_walk_function (&fns);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      unsigned i;
      bitmap_iterator bi;

      fputs ("\nScan results:\n", dump_file);
      EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
	{
	  tree var = referenced_var (i);
	  struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
	  if (elt)
	    scan_dump (elt);
	}
      fputc ('\n', dump_file);
    }
}
/* Phase Three: Make decisions about which variables to scalarize, if any.
   All elements to be scalarized have replacement variables made for them.  */

/* A subroutine of build_element_name.  Recursively build the element
   name on the obstack.  */

static void
build_element_name_1 (struct sra_elt *elt)
{
  tree t;
  char buffer[32];

  if (elt->parent)
    {
      build_element_name_1 (elt->parent);
      obstack_1grow (&sra_obstack, '$');

      if (TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
	{
	  if (elt->element == integer_zero_node)
	    obstack_grow (&sra_obstack, "real", 4);
	  else
	    obstack_grow (&sra_obstack, "imag", 4);
	  return;
	}
    }

  t = elt->element;
  if (TREE_CODE (t) == INTEGER_CST)
    {
      /* ??? Eh.  Don't bother doing double-wide printing.  */
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (t));
      obstack_grow (&sra_obstack, buffer, strlen (buffer));
    }
  else if (TREE_CODE (t) == BIT_FIELD_REF)
    {
      sprintf (buffer, "B" HOST_WIDE_INT_PRINT_DEC,
	       tree_low_cst (TREE_OPERAND (t, 2), 1));
      obstack_grow (&sra_obstack, buffer, strlen (buffer));
      sprintf (buffer, "F" HOST_WIDE_INT_PRINT_DEC,
	       tree_low_cst (TREE_OPERAND (t, 1), 1));
      obstack_grow (&sra_obstack, buffer, strlen (buffer));
    }
  else
    {
      tree name = DECL_NAME (t);
      if (name)
	obstack_grow (&sra_obstack, IDENTIFIER_POINTER (name),
		      IDENTIFIER_LENGTH (name));
      else
	{
	  sprintf (buffer, "D%u", DECL_UID (t));
	  obstack_grow (&sra_obstack, buffer, strlen (buffer));
	}
    }
}

/* Construct a pretty variable name for an element's replacement variable.
   The name is built on the obstack.  */

static char *
build_element_name (struct sra_elt *elt)
{
  build_element_name_1 (elt);
  obstack_1grow (&sra_obstack, '\0');
  return XOBFINISH (&sra_obstack, char *);
}
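/* Illustration: the replacement for the real part of a COMPLEX_TYPE
   field F of variable X is named "x$f$real"; element 3 of an array
   field A would yield "x$a$3".  */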
/* Instantiate an element as an independent variable.  */

static void
instantiate_element (struct sra_elt *elt)
{
  struct sra_elt *base_elt;
  tree var, base;
  bool nowarn = TREE_NO_WARNING (elt->element);

  for (base_elt = elt; base_elt->parent; base_elt = base_elt->parent)
    if (!nowarn)
      nowarn = TREE_NO_WARNING (base_elt->parent->element);
  base = base_elt->element;

  elt->replacement = var = make_rename_temp (elt->type, "SR");

  if (DECL_P (elt->element)
      && !tree_int_cst_equal (DECL_SIZE (var), DECL_SIZE (elt->element)))
    {
      DECL_SIZE (var) = DECL_SIZE (elt->element);
      DECL_SIZE_UNIT (var) = DECL_SIZE_UNIT (elt->element);

      elt->in_bitfld_block = 1;
      elt->replacement = fold_build3 (BIT_FIELD_REF, elt->type, var,
				      DECL_SIZE (var),
				      BYTES_BIG_ENDIAN
				      ? size_binop (MINUS_EXPR,
						    TYPE_SIZE (elt->type),
						    DECL_SIZE (var))
				      : bitsize_int (0));
    }

  /* For vectors, if used on the left hand side with BIT_FIELD_REF,
     they are not a gimple register.  */
  if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE && elt->is_vector_lhs)
    DECL_GIMPLE_REG_P (var) = 0;

  DECL_SOURCE_LOCATION (var) = DECL_SOURCE_LOCATION (base);
  DECL_ARTIFICIAL (var) = 1;

  if (TREE_THIS_VOLATILE (elt->type))
    {
      TREE_THIS_VOLATILE (var) = 1;
      TREE_SIDE_EFFECTS (var) = 1;
    }

  if (DECL_NAME (base) && !DECL_IGNORED_P (base))
    {
      char *pretty_name = build_element_name (elt);
      DECL_NAME (var) = get_identifier (pretty_name);
      obstack_free (&sra_obstack, pretty_name);

      SET_DECL_DEBUG_EXPR (var, generate_element_ref (elt));
      DECL_DEBUG_EXPR_IS_FROM (var) = 1;

      DECL_IGNORED_P (var) = 0;
      TREE_NO_WARNING (var) = nowarn;
    }
  else
    {
      DECL_IGNORED_P (var) = 1;
      /* ??? We can't generate any warning that would be meaningful.  */
      TREE_NO_WARNING (var) = 1;
    }

  /* Zero-initialize bit-field scalarization variables, to avoid
     triggering undefined behavior.  */
  if (TREE_CODE (elt->element) == BIT_FIELD_REF
      || (var != elt->replacement
	  && TREE_CODE (elt->replacement) == BIT_FIELD_REF))
    {
      gimple_seq init = sra_build_assignment (var,
					      fold_convert (TREE_TYPE (var),
							    integer_zero_node));
      insert_edge_copies_seq (init, ENTRY_BLOCK_PTR);
      mark_all_v_defs_seq (init);
    }

  if (dump_file)
    {
      fputs ("  ", dump_file);
      dump_sra_elt_name (dump_file, elt);
      fputs (" -> ", dump_file);
      print_generic_expr (dump_file, var, dump_flags);
      fputc ('\n', dump_file);
    }
}
/* Make one pass across an element tree deciding whether or not it's
   profitable to instantiate individual leaf scalars.

   PARENT_USES and PARENT_COPIES are the sum of the N_USES and N_COPIES
   fields all the way up the tree.  */

static void
decide_instantiation_1 (struct sra_elt *elt, unsigned int parent_uses,
			unsigned int parent_copies)
{
  if (dump_file && !elt->parent)
    {
      fputs ("Initial instantiation for ", dump_file);
      dump_sra_elt_name (dump_file, elt);
      fputc ('\n', dump_file);
    }

  if (elt->cannot_scalarize)
    return;

  if (elt->is_scalar)
    {
      /* The decision is simple: instantiate if we're used more frequently
	 than the parent needs to be seen as a complete unit.  */
      if (elt->n_uses + elt->n_copies + parent_copies > parent_uses)
	instantiate_element (elt);
    }
  else
    {
      struct sra_elt *c, *group;
      unsigned int this_uses = elt->n_uses + parent_uses;
      unsigned int this_copies = elt->n_copies + parent_copies;

      /* Consider groups of sub-elements as weighing in favour of
	 instantiation whatever their size.  */
      for (group = elt->groups; group ; group = group->sibling)
	FOR_EACH_ACTUAL_CHILD (c, group)
	  {
	    c->n_uses += group->n_uses;
	    c->n_copies += group->n_copies;
	  }

      for (c = elt->children; c ; c = c->sibling)
	decide_instantiation_1 (c, this_uses, this_copies);
    }
}
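/* Illustrative numbers: if A is used as a whole twice (so a leaf A.B
   sees parent_uses == 2 and parent_copies == 0), B is instantiated
   only when its own n_uses + n_copies exceeds 2; three direct reads of
   a.b qualify, a single read does not.  */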
/* Compute the size and number of all instantiated elements below ELT.
   We will only care about this if the size of the complete structure
   fits in a HOST_WIDE_INT, so we don't have to worry about overflow.  */

static unsigned int
sum_instantiated_sizes (struct sra_elt *elt, unsigned HOST_WIDE_INT *sizep)
{
  if (elt->replacement)
    {
      *sizep += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (elt->type));
      return 1;
    }
  else
    {
      struct sra_elt *c;
      unsigned int count = 0;

      for (c = elt->children; c ; c = c->sibling)
	count += sum_instantiated_sizes (c, sizep);

      return count;
    }
}

/* Instantiate fields in ELT->TYPE that are not currently present as
   children of ELT.  */

static void instantiate_missing_elements (struct sra_elt *elt);

static struct sra_elt *
instantiate_missing_elements_1 (struct sra_elt *elt, tree child, tree type)
{
  struct sra_elt *sub = lookup_element (elt, child, type, INSERT);
  if (sub->is_scalar)
    {
      if (sub->replacement == NULL)
	instantiate_element (sub);
    }
  else
    instantiate_missing_elements (sub);
  return sub;
}
/* Obtain the canonical type for field F of ELEMENT.  */

static tree
canon_type_for_field (tree f, tree element)
{
  tree field_type = TREE_TYPE (f);

  /* canonicalize_component_ref() unwidens some bit-field types (not
     marked as DECL_BIT_FIELD in C++), so we must do the same, lest we
     may introduce type mismatches.  */
  if (INTEGRAL_TYPE_P (field_type)
      && DECL_MODE (f) != TYPE_MODE (field_type))
    field_type = TREE_TYPE (get_unwidened (build3 (COMPONENT_REF,
						   field_type,
						   element,
						   f, NULL_TREE),
					   NULL_TREE));

  return field_type;
}
/* Look for adjacent fields of ELT starting at F that we'd like to
   scalarize as a single variable.  Return the last field of the
   group.  */

static tree
try_instantiate_multiple_fields (struct sra_elt *elt, tree f)
{
  int count;
  unsigned HOST_WIDE_INT align, bit, size, alchk;
  enum machine_mode mode;
  tree first = f, prev;
  tree type, var;
  struct sra_elt *block;

  /* Pointer fields are typically best handled as standalone entities.  */
  if (POINTER_TYPE_P (TREE_TYPE (f)))
    return f;

  if (!is_sra_scalar_type (TREE_TYPE (f))
      || !host_integerp (DECL_FIELD_OFFSET (f), 1)
      || !host_integerp (DECL_FIELD_BIT_OFFSET (f), 1)
      || !host_integerp (DECL_SIZE (f), 1)
      || lookup_element (elt, f, NULL, NO_INSERT))
    return f;

  block = elt;

  /* For complex and array objects, there are going to be integer
     literals as child elements.  In this case, we can't just take the
     alignment and mode of the decl, so we instead rely on the element
     type.

     ??? We could try to infer additional alignment from the full
     object declaration and the location of the sub-elements we're
     accessing.  */
  for (count = 0; !DECL_P (block->element); count++)
    block = block->parent;

  align = DECL_ALIGN (block->element);
  alchk = GET_MODE_BITSIZE (DECL_MODE (block->element));

  if (count)
    {
      type = TREE_TYPE (block->element);
      while (count--)
	type = TREE_TYPE (type);

      align = TYPE_ALIGN (type);
      alchk = GET_MODE_BITSIZE (TYPE_MODE (type));
    }

  if (align < alchk)
    align = alchk;

  /* Coalescing wider fields is probably pointless and
     inefficient.  */
  if (align > BITS_PER_WORD)
    align = BITS_PER_WORD;

  bit = tree_low_cst (DECL_FIELD_OFFSET (f), 1) * BITS_PER_UNIT
    + tree_low_cst (DECL_FIELD_BIT_OFFSET (f), 1);
  size = tree_low_cst (DECL_SIZE (f), 1);

  alchk = align - 1;
  alchk = ~alchk;

  if ((bit & alchk) != ((bit + size - 1) & alchk))
    return f;

  /* Find adjacent fields in the same alignment word.  */

  for (prev = f, f = TREE_CHAIN (f);
       f && TREE_CODE (f) == FIELD_DECL
	 && is_sra_scalar_type (TREE_TYPE (f))
	 && host_integerp (DECL_FIELD_OFFSET (f), 1)
	 && host_integerp (DECL_FIELD_BIT_OFFSET (f), 1)
	 && host_integerp (DECL_SIZE (f), 1)
	 && !lookup_element (elt, f, NULL, NO_INSERT);
       prev = f, f = TREE_CHAIN (f))
    {
      unsigned HOST_WIDE_INT nbit, nsize;

      nbit = tree_low_cst (DECL_FIELD_OFFSET (f), 1) * BITS_PER_UNIT
	+ tree_low_cst (DECL_FIELD_BIT_OFFSET (f), 1);
      nsize = tree_low_cst (DECL_SIZE (f), 1);

      if (bit + size == nbit)
	{
	  if ((bit & alchk) != ((nbit + nsize - 1) & alchk))
	    {
	      /* If we're at an alignment boundary, don't bother
		 growing alignment such that we can include this next
		 field.  */
	      if ((nbit & alchk)
		  || GET_MODE_BITSIZE (DECL_MODE (f)) <= align)
		break;

	      align = GET_MODE_BITSIZE (DECL_MODE (f));
	      alchk = align - 1;
	      alchk = ~alchk;

	      if ((bit & alchk) != ((nbit + nsize - 1) & alchk))
		break;
	    }
	  size += nsize;
	}
      else if (nbit + nsize == bit)
	{
	  if ((nbit & alchk) != ((bit + size - 1) & alchk))
	    {
	      if ((bit & alchk)
		  || GET_MODE_BITSIZE (DECL_MODE (f)) <= align)
		break;

	      align = GET_MODE_BITSIZE (DECL_MODE (f));
	      alchk = align - 1;
	      alchk = ~alchk;

	      if ((nbit & alchk) != ((bit + size - 1) & alchk))
		break;
	    }
	  bit = nbit;
	  size += nsize;
	}
      else
	break;
    }

  f = prev;

  if (f == first)
    return f;

  gcc_assert ((bit & alchk) == ((bit + size - 1) & alchk));

  /* Try to widen the bit range so as to cover padding bits as well.  */

  if ((bit & ~alchk) || size != align)
    {
      unsigned HOST_WIDE_INT mbit = bit & alchk;
      unsigned HOST_WIDE_INT msize = align;

      for (f = TYPE_FIELDS (elt->type);
	   f; f = TREE_CHAIN (f))
	{
	  unsigned HOST_WIDE_INT fbit, fsize;

	  /* Skip the fields from first to prev.  */
	  if (f == first)
	    {
	      f = prev;
	      continue;
	    }

	  if (!(TREE_CODE (f) == FIELD_DECL
		&& host_integerp (DECL_FIELD_OFFSET (f), 1)
		&& host_integerp (DECL_FIELD_BIT_OFFSET (f), 1)))
	    continue;

	  fbit = tree_low_cst (DECL_FIELD_OFFSET (f), 1) * BITS_PER_UNIT
	    + tree_low_cst (DECL_FIELD_BIT_OFFSET (f), 1);

	  /* If we're past the selected word, we're fine.  */
	  if ((bit & alchk) < (fbit & alchk))
	    continue;

	  if (host_integerp (DECL_SIZE (f), 1))
	    fsize = tree_low_cst (DECL_SIZE (f), 1);
	  else
	    /* Assume a variable-sized field takes up all space till
	       the end of the word.  ??? Endianness issues?  */
	    fsize = align - (fbit & alchk);

	  if ((fbit & alchk) < (bit & alchk))
	    {
	      /* A large field might start at a previous word and
		 extend into the selected word.  Exclude those
		 bits.  ??? Endianness issues?  */
	      HOST_WIDE_INT diff = fbit + fsize - mbit;

	      if (diff <= 0)
		continue;

	      mbit += diff;
	      msize -= diff;
	    }
	  else
	    {
	      /* Non-overlapping, great.  */
	      if (fbit + fsize <= mbit
		  || mbit + msize <= fbit)
		continue;

	      if (fbit <= mbit)
		{
		  unsigned HOST_WIDE_INT diff = fbit + fsize - mbit;
		  mbit += diff;
		  msize -= diff;
		}
	      else if (fbit > mbit)
		msize -= (mbit + msize - fbit);
	      else
		gcc_unreachable ();
	    }
	}

      bit = mbit;
      size = msize;
    }

  /* Now we know the bit range we're interested in.  Find the smallest
     machine mode we can use to access it.  */

  for (mode = smallest_mode_for_size (size, MODE_INT);
       ;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      gcc_assert (mode != VOIDmode);

      alchk = GET_MODE_PRECISION (mode) - 1;
      alchk = ~alchk;

      if ((bit & alchk) == ((bit + size - 1) & alchk))
	break;
    }

  gcc_assert (~alchk < align);

  /* Create the field group as a single variable.  */

  /* We used to create a type for the mode above, but the size turns
     out not to be the mode size.  As we need a matching type
     to build a BIT_FIELD_REF, use a nonstandard integer type as
     fallback.  */
  type = lang_hooks.types.type_for_size (size, 1);
  if (!type || TYPE_PRECISION (type) != size)
    type = build_nonstandard_integer_type (size, 1);
  gcc_assert (type);
  var = build3 (BIT_FIELD_REF, type, NULL_TREE,
		bitsize_int (size), bitsize_int (bit));

  block = instantiate_missing_elements_1 (elt, var, type);
  gcc_assert (block && block->is_scalar);

  var = block->replacement;

  if ((bit & ~alchk)
      || (HOST_WIDE_INT)size != tree_low_cst (DECL_SIZE (var), 1))
    {
      block->replacement = fold_build3 (BIT_FIELD_REF,
					TREE_TYPE (block->element), var,
					bitsize_int (size),
					bitsize_int (bit & ~alchk));
    }

  block->in_bitfld_block = 2;

  /* Add the member fields to the group, such that they access
     portions of the group variable.  */

  for (f = first; f != TREE_CHAIN (prev); f = TREE_CHAIN (f))
    {
      tree field_type = canon_type_for_field (f, elt->element);
      struct sra_elt *fld = lookup_element (block, f, field_type, INSERT);

      gcc_assert (fld && fld->is_scalar && !fld->replacement);

      fld->replacement = fold_build3 (BIT_FIELD_REF, field_type, var,
				      DECL_SIZE (f),
				      bitsize_int
				      ((TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f))
					* BITS_PER_UNIT
					+ (TREE_INT_CST_LOW
					   (DECL_FIELD_BIT_OFFSET (f))))
				       & ~alchk));
      fld->in_bitfld_block = 1;
    }

  return prev;
}
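/* Illustration (little-endian, hypothetical): for
   "struct { char a; char b; short c; }" the three fields can be fused
   into one 32-bit group variable, with A, B and C becoming
   BIT_FIELD_REFs of bits 0-7, 8-15 and 16-31 of that variable.  */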
static void
instantiate_missing_elements (struct sra_elt *elt)
{
  tree type = elt->type;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	tree f;
	for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      tree last = try_instantiate_multiple_fields (elt, f);

	      if (last != f)
		{
		  f = last;
		  continue;
		}

	      instantiate_missing_elements_1 (elt, f,
					      canon_type_for_field
					      (f, elt->element));
	    }
	break;
      }

    case ARRAY_TYPE:
      {
	tree i, max, subtype;

	i = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	max = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	subtype = TREE_TYPE (type);

	while (1)
	  {
	    instantiate_missing_elements_1 (elt, i, subtype);
	    if (tree_int_cst_equal (i, max))
	      break;
	    i = int_const_binop (PLUS_EXPR, i, integer_one_node, true);
	  }

	break;
      }

    case COMPLEX_TYPE:
      type = TREE_TYPE (type);
      instantiate_missing_elements_1 (elt, integer_zero_node, type);
      instantiate_missing_elements_1 (elt, integer_one_node, type);
      break;

    default:
      gcc_unreachable ();
    }
}
/* Return true if there is only one non-aggregate field in the record, TYPE.
   Return false otherwise.  */

static bool
single_scalar_field_in_record_p (tree type)
{
  int num_fields = 0;
  tree field;
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      {
	num_fields++;

	if (num_fields == 2)
	  return false;

	if (AGGREGATE_TYPE_P (TREE_TYPE (field)))
	  return false;
      }

  return true;
}
/* Make one pass across an element tree deciding whether to perform block
   or element copies.  If we decide on element copies, instantiate all
   elements.  Return true if there are any instantiated sub-elements.  */

static bool
decide_block_copy (struct sra_elt *elt)
{
  struct sra_elt *c;
  bool any_inst;

  /* We shouldn't be invoked on groups of sub-elements as they must
     behave like their parent as far as block copy is concerned.  */
  gcc_assert (!elt->is_group);

  /* If scalarization is disabled, respect it.  */
  if (elt->cannot_scalarize)
    {
      elt->use_block_copy = 1;

      if (dump_file)
	{
	  fputs ("Scalarization disabled for ", dump_file);
	  dump_sra_elt_name (dump_file, elt);
	  fputc ('\n', dump_file);
	}

      /* Disable scalarization of sub-elements */
      for (c = elt->children; c; c = c->sibling)
	{
	  c->cannot_scalarize = 1;
	  decide_block_copy (c);
	}

      /* Groups behave like their parent.  */
      for (c = elt->groups; c; c = c->sibling)
	{
	  c->cannot_scalarize = 1;
	  c->use_block_copy = 1;
	}

      return false;
    }

  /* Don't decide if we've no uses and no groups.  */
  if (elt->n_uses == 0 && elt->n_copies == 0 && elt->groups == NULL)
    ;

  else if (!elt->is_scalar)
    {
      tree size_tree = TYPE_SIZE_UNIT (elt->type);
      bool use_block_copy = true;

      /* Tradeoffs for COMPLEX types pretty much always make it better
	 to go ahead and split the components.  */
      if (TREE_CODE (elt->type) == COMPLEX_TYPE)
	use_block_copy = false;

      /* Don't bother trying to figure out the rest if the structure is
	 so large we can't do easy arithmetic.  This also forces block
	 copies for variable sized structures.  */
      else if (host_integerp (size_tree, 1))
	{
	  unsigned HOST_WIDE_INT full_size, inst_size = 0;
	  unsigned int max_size, max_count, inst_count, full_count;

	  /* If the sra-max-structure-size parameter is 0, then the
	     user has not overridden the parameter and we can choose a
	     sensible default.  */
	  max_size = SRA_MAX_STRUCTURE_SIZE
	    ? SRA_MAX_STRUCTURE_SIZE
	    : MOVE_RATIO (optimize_function_for_speed_p (cfun)) * UNITS_PER_WORD;
	  max_count = SRA_MAX_STRUCTURE_COUNT
	    ? SRA_MAX_STRUCTURE_COUNT
	    : MOVE_RATIO (optimize_function_for_speed_p (cfun));

	  full_size = tree_low_cst (size_tree, 1);
	  full_count = count_type_elements (elt->type, false);
	  inst_count = sum_instantiated_sizes (elt, &inst_size);

	  /* If there is only one scalar field in the record, don't block copy.  */
	  if (single_scalar_field_in_record_p (elt->type))
	    use_block_copy = false;

	  /* ??? What to do here.  If there are two fields, and we've only
	     instantiated one, then instantiating the other is clearly a win.
	     If there are a large number of fields then the size of the copy
	     is much more of a factor.  */

	  /* If the structure is small, and we've made copies, go ahead
	     and instantiate, hoping that the copies will go away.  */
	  if (full_size <= max_size
	      && (full_count - inst_count) <= max_count
	      && elt->n_copies > elt->n_uses)
	    use_block_copy = false;
	  else if (inst_count * 100 >= full_count * SRA_FIELD_STRUCTURE_RATIO
		   && inst_size * 100 >= full_size * SRA_FIELD_STRUCTURE_RATIO)
	    use_block_copy = false;

	  /* In order to avoid block copy, we have to be able to instantiate
	     all elements of the type.  See if this is possible.  */
	  if (!use_block_copy
	      && (!can_completely_scalarize_p (elt)
		  || !type_can_instantiate_all_elements (elt->type)))
	    use_block_copy = true;
	}

      elt->use_block_copy = use_block_copy;

      /* Groups behave like their parent.  */
      for (c = elt->groups; c; c = c->sibling)
	c->use_block_copy = use_block_copy;

      if (dump_file)
	{
	  fprintf (dump_file, "Using %s for ",
		   use_block_copy ? "block-copy" : "element-copy");
	  dump_sra_elt_name (dump_file, elt);
	  fputc ('\n', dump_file);
	}

      if (!use_block_copy)
	{
	  instantiate_missing_elements (elt);
	  return true;
	}
    }

  any_inst = elt->replacement != NULL;

  for (c = elt->children; c ; c = c->sibling)
    any_inst |= decide_block_copy (c);

  return any_inst;
}
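/* Illustrative numbers: with a MOVE_RATIO of 4 on a 32-bit target, the
   defaults are max_size == 16 bytes and max_count == 4, so a three-word
   record copied more often than it is used as a whole is element-copied,
   while a large, mostly uninstantiated record keeps block copies.  */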
/* Entry point to phase 3.  Instantiate scalar replacement variables.  */

static void
decide_instantiations (void)
{
  unsigned int i;
  bool cleared_any;
  bitmap_head done_head;
  bitmap_iterator bi;

  /* We cannot clear bits from a bitmap we're iterating over,
     so save up all the bits to clear until the end.  */
  bitmap_initialize (&done_head, &bitmap_default_obstack);
  cleared_any = false;

  EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
    {
      tree var = referenced_var (i);
      struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
      if (elt)
	{
	  decide_instantiation_1 (elt, 0, 0);
	  if (!decide_block_copy (elt))
	    elt = NULL;
	}
      if (!elt)
	{
	  bitmap_set_bit (&done_head, i);
	  cleared_any = true;
	}
    }

  if (cleared_any)
    {
      bitmap_and_compl_into (sra_candidates, &done_head);
      bitmap_and_compl_into (needs_copy_in, &done_head);
    }
  bitmap_clear (&done_head);

  mark_set_for_renaming (sra_candidates);

  if (dump_file)
    fputc ('\n', dump_file);
}
/* Phase Four: Update the function to match the replacements created.  */

/* Mark all the variables in VDEF/VUSE operators for STMT for
   renaming.  This becomes necessary when we modify all of a
   non-scalar.  */

static void
mark_all_v_defs_stmt (gimple stmt)
{
  tree sym;
  ssa_op_iter iter;

  update_stmt_if_modified (stmt);

  FOR_EACH_SSA_TREE_OPERAND (sym, stmt, iter, SSA_OP_ALL_VIRTUALS)
    {
      if (TREE_CODE (sym) == SSA_NAME)
	sym = SSA_NAME_VAR (sym);
      mark_sym_for_renaming (sym);
    }
}

/* Mark all the variables in virtual operands in all the statements in
   LIST for renaming.  */

static void
mark_all_v_defs_seq (gimple_seq seq)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    mark_all_v_defs_stmt (gsi_stmt (gsi));
}
/* Mark every replacement under ELT with TREE_NO_WARNING.  */

static void
mark_no_warning (struct sra_elt *elt)
{
  if (!elt->all_no_warning)
    {
      if (elt->replacement)
	TREE_NO_WARNING (elt->replacement) = 1;
      else
	{
	  struct sra_elt *c;
	  FOR_EACH_ACTUAL_CHILD (c, elt)
	    mark_no_warning (c);
	}
      elt->all_no_warning = true;
    }
}
/* Build a single level component reference to ELT rooted at BASE.  */

static tree
generate_one_element_ref (struct sra_elt *elt, tree base)
{
  switch (TREE_CODE (TREE_TYPE (base)))
    {
    case RECORD_TYPE:
      {
	tree field = elt->element;

	/* We can't test elt->in_bitfld_block here because, when this is
	   called from instantiate_element, we haven't set this field
	   yet.  */
	if (TREE_CODE (field) == BIT_FIELD_REF)
	  {
	    tree ret = unshare_expr (field);
	    TREE_OPERAND (ret, 0) = base;
	    return ret;
	  }

	/* Watch out for compatible records with differing field lists.  */
	if (DECL_FIELD_CONTEXT (field) != TYPE_MAIN_VARIANT (TREE_TYPE (base)))
	  field = find_compatible_field (TREE_TYPE (base), field);

	return build3 (COMPONENT_REF, elt->type, base, field, NULL);
      }

    case ARRAY_TYPE:
      if (TREE_CODE (elt->element) == RANGE_EXPR)
	return build4 (ARRAY_RANGE_REF, elt->type, base,
		       TREE_OPERAND (elt->element, 0), NULL, NULL);
      else
	return build4 (ARRAY_REF, elt->type, base, elt->element, NULL, NULL);

    case COMPLEX_TYPE:
      if (elt->element == integer_zero_node)
	return build1 (REALPART_EXPR, elt->type, base);
      else
	return build1 (IMAGPART_EXPR, elt->type, base);

    default:
      gcc_unreachable ();
    }
}

/* Build a full component reference to ELT rooted at its native variable.  */

static tree
generate_element_ref (struct sra_elt *elt)
{
  if (elt->parent)
    return generate_one_element_ref (elt, generate_element_ref (elt->parent));
  else
    return elt->element;
}
/* Return true if BF is a bit-field that we can handle like a scalar.  */

static bool
scalar_bitfield_p (tree bf)
{
  return (TREE_CODE (bf) == BIT_FIELD_REF
	  && (is_gimple_reg (TREE_OPERAND (bf, 0))
	      || (TYPE_MODE (TREE_TYPE (TREE_OPERAND (bf, 0))) != BLKmode
		  && (!TREE_SIDE_EFFECTS (TREE_OPERAND (bf, 0))
		      || (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE
						       (TREE_OPERAND (bf, 0))))
			  <= BITS_PER_WORD)))));
}
/* Create an assignment statement from SRC to DST.  */

static gimple_seq
sra_build_assignment (tree dst, tree src)
{
  gimple stmt;
  gimple_seq seq = NULL, seq2 = NULL;
  /* Turning BIT_FIELD_REFs into bit operations enables other passes
     to do a much better job at optimizing the code.
     From dst = BIT_FIELD_REF <var, sz, off> we produce

	SR.1 = (scalar type) var;
	SR.2 = SR.1 >> off;
	SR.3 = SR.2 & ((1 << sz) - 1);
	... possible sign extension of SR.3 ...
	dst = (destination type) SR.3;  */
  if (scalar_bitfield_p (src))
    {
      tree var, shift, width;
      tree utype, stype;
      bool unsignedp = (INTEGRAL_TYPE_P (TREE_TYPE (src))
			? TYPE_UNSIGNED (TREE_TYPE (src)) : true);
      struct gimplify_ctx gctx;

      var = TREE_OPERAND (src, 0);
      width = TREE_OPERAND (src, 1);
      /* The offset needs to be adjusted to a right shift quantity
	 depending on the endianness.  */
      if (BYTES_BIG_ENDIAN)
	{
	  tree tmp = size_binop (PLUS_EXPR, width, TREE_OPERAND (src, 2));
	  shift = size_binop (MINUS_EXPR, TYPE_SIZE (TREE_TYPE (var)), tmp);
	}
      else
	shift = TREE_OPERAND (src, 2);

      /* In weird cases we have non-integral types for the source or
	 destination object.
	 ??? For unknown reasons we also want an unsigned scalar type.  */
      stype = TREE_TYPE (var);
      if (!INTEGRAL_TYPE_P (stype))
	stype = lang_hooks.types.type_for_size (TREE_INT_CST_LOW
						(TYPE_SIZE (stype)), 1);
      else if (!TYPE_UNSIGNED (stype))
	stype = unsigned_type_for (stype);

      utype = TREE_TYPE (dst);
      if (!INTEGRAL_TYPE_P (utype))
	utype = lang_hooks.types.type_for_size (TREE_INT_CST_LOW
						(TYPE_SIZE (utype)), 1);
      else if (!TYPE_UNSIGNED (utype))
	utype = unsigned_type_for (utype);

      /* Convert the base var of the BIT_FIELD_REF to the scalar type
	 we use for computation if we cannot use it directly.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (var)))
	var = fold_convert (stype, var);
      else
	var = fold_build1 (VIEW_CONVERT_EXPR, stype, var);

      if (!integer_zerop (shift))
	var = fold_build2 (RSHIFT_EXPR, stype, var, shift);

      /* If we need a masking operation, produce one.  */
      if (TREE_INT_CST_LOW (width) == TYPE_PRECISION (stype))
	unsignedp = true;
      else
	{
	  tree one = build_int_cst_wide (stype, 1, 0);
	  tree mask = int_const_binop (LSHIFT_EXPR, one, width, 0);
	  mask = int_const_binop (MINUS_EXPR, mask, one, 0);
	  var = fold_build2 (BIT_AND_EXPR, stype, var, mask);
	}

      /* After shifting and masking, convert to the target type.  */
      var = fold_convert (utype, var);

      /* Perform sign extension, if required.
	 ??? This should never be necessary.  */
      if (!unsignedp)
	{
	  tree signbit = int_const_binop (LSHIFT_EXPR,
					  build_int_cst_wide (utype, 1, 0),
					  size_binop (MINUS_EXPR, width,
						      bitsize_int (1)), 0);

	  var = fold_build2 (BIT_XOR_EXPR, utype, var, signbit);
	  var = fold_build2 (MINUS_EXPR, utype, var, signbit);
	}

      /* fold_build3 (BIT_FIELD_REF, ...) sometimes returns a cast.  */
      STRIP_NOPS (dst);

      /* Finally, move and convert to the destination.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (dst)))
	var = fold_convert (TREE_TYPE (dst), var);
      else
	var = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (dst), var);

      push_gimplify_context (&gctx);
      gctx.into_ssa = true;
      gctx.allow_rhs_cond_expr = true;

      gimplify_assign (dst, var, &seq);

      if (gimple_referenced_vars (cfun))
	for (var = gctx.temps; var; var = TREE_CHAIN (var))
	  add_referenced_var (var);
      pop_gimplify_context (NULL);

      return seq;
    }

  /* fold_build3 (BIT_FIELD_REF, ...) sometimes returns a cast.  */
  if (CONVERT_EXPR_P (dst))
    {
      STRIP_NOPS (dst);
      src = fold_convert (TREE_TYPE (dst), src);
    }
  /* It was hoped that we could perform some type sanity checking
     here, but since front-ends can emit accesses of fields in types
     different from their nominal types and copy structures containing
     them as a whole, we'd have to handle such differences here.
     Since such accesses under different types require compatibility
     anyway, there's little point in making tests and/or adding
     conversions to ensure the types of src and dst are the same.
     So we just assume type differences at this point are ok.
     The only exception we make here are pointer types, which can be different
     in e.g. structurally equal, but non-identical RECORD_TYPEs.  */
  else if (POINTER_TYPE_P (TREE_TYPE (dst))
	   && !useless_type_conversion_p (TREE_TYPE (dst), TREE_TYPE (src)))
    src = fold_convert (TREE_TYPE (dst), src);

  /* ??? Only call the gimplifier if we need to.  Otherwise we may
     end up substituting with DECL_VALUE_EXPR - see PR37380.  */
  if (!handled_component_p (src)
      && !SSA_VAR_P (src))
    {
      src = force_gimple_operand (src, &seq2, false, NULL_TREE);
      gimple_seq_add_seq (&seq, seq2);
    }
  stmt = gimple_build_assign (dst, src);
  gimple_seq_add_stmt (&seq, stmt);
  return seq;
}
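/* Illustration: for a little-endian read of eight bits at offset 16,
   "dst = BIT_FIELD_REF <x, 8, 16>" with a 32-bit X expands roughly to

	SR.1 = (unsigned int) x;
	SR.2 = SR.1 >> 16;
	SR.3 = SR.2 & 255;
	dst = (destination type) SR.3;  */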
/* BIT_FIELD_REFs must not be shared.  sra_build_elt_assignment()
   takes care of assignments, but we must create copies for uses.  */
#define REPLDUP(t) (TREE_CODE (t) != BIT_FIELD_REF ? (t) : unshare_expr (t))
2293 /* Emit an assignment from SRC to DST, but if DST is a scalarizable
2294 BIT_FIELD_REF, turn it into bit operations. */
static gimple_seq
sra_build_bf_assignment (tree dst, tree src)
{
  tree var, type, utype, tmp, tmp2, tmp3;
  gimple_seq seq;
  gimple stmt;
  tree cst, cst2, mask;
  tree minshift, maxshift;

  if (TREE_CODE (dst) != BIT_FIELD_REF)
    return sra_build_assignment (dst, src);

  var = TREE_OPERAND (dst, 0);

  if (!scalar_bitfield_p (dst))
    return sra_build_assignment (REPLDUP (dst), src);

  seq = NULL;

  cst = fold_convert (bitsizetype, TREE_OPERAND (dst, 2));
  cst2 = size_binop (PLUS_EXPR,
		     fold_convert (bitsizetype, TREE_OPERAND (dst, 1)),
		     cst);

  if (BYTES_BIG_ENDIAN)
    {
      maxshift = size_binop (MINUS_EXPR, TYPE_SIZE (TREE_TYPE (var)), cst);
      minshift = size_binop (MINUS_EXPR, TYPE_SIZE (TREE_TYPE (var)), cst2);
    }
  else
    {
      maxshift = cst2;
      minshift = cst;
    }

  type = TREE_TYPE (var);
  if (!INTEGRAL_TYPE_P (type))
    type = lang_hooks.types.type_for_size
      (TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (var))), 1);
  if (TYPE_UNSIGNED (type))
    utype = type;
  else
    utype = unsigned_type_for (type);

  mask = build_int_cst_wide (utype, 1, 0);
  if (TREE_INT_CST_LOW (maxshift) == TYPE_PRECISION (utype))
    cst = build_int_cst_wide (utype, 0, 0);
  else
    cst = int_const_binop (LSHIFT_EXPR, mask, maxshift, true);
  if (integer_zerop (minshift))
    cst2 = mask;
  else
    cst2 = int_const_binop (LSHIFT_EXPR, mask, minshift, true);
  mask = int_const_binop (MINUS_EXPR, cst, cst2, true);
  mask = fold_build1 (BIT_NOT_EXPR, utype, mask);

  if (TYPE_MAIN_VARIANT (utype) != TYPE_MAIN_VARIANT (TREE_TYPE (var))
      && !integer_zerop (mask))
    {
      tmp = var;
      if (!is_gimple_variable (tmp))
	tmp = unshare_expr (var);
      else
	TREE_NO_WARNING (var) = true;

      tmp2 = make_rename_temp (utype, "SR");

      if (INTEGRAL_TYPE_P (TREE_TYPE (var)))
	tmp = fold_convert (utype, tmp);
      else
	tmp = fold_build1 (VIEW_CONVERT_EXPR, utype, tmp);

      stmt = gimple_build_assign (tmp2, tmp);
      gimple_seq_add_stmt (&seq, stmt);
    }
  else
    tmp2 = var;

  if (!integer_zerop (mask))
    {
      tmp = make_rename_temp (utype, "SR");
      stmt = gimple_build_assign (tmp, fold_build2 (BIT_AND_EXPR, utype,
						    tmp2, mask));
      gimple_seq_add_stmt (&seq, stmt);
    }
  else
    tmp = mask;

  if (is_gimple_reg (src) && INTEGRAL_TYPE_P (TREE_TYPE (src)))
    tmp2 = src;
  else if (INTEGRAL_TYPE_P (TREE_TYPE (src)))
    {
      gimple_seq tmp_seq;
      tmp2 = make_rename_temp (TREE_TYPE (src), "SR");
      tmp_seq = sra_build_assignment (tmp2, src);
      gimple_seq_add_seq (&seq, tmp_seq);
    }
  else
    {
      gimple_seq tmp_seq;
      tmp2 = make_rename_temp
	(lang_hooks.types.type_for_size
	 (TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (src))),
	  1), "SR");
      tmp_seq = sra_build_assignment (tmp2, fold_build1 (VIEW_CONVERT_EXPR,
							 TREE_TYPE (tmp2), src));
      gimple_seq_add_seq (&seq, tmp_seq);
    }

  if (!TYPE_UNSIGNED (TREE_TYPE (tmp2)))
    {
      gimple_seq tmp_seq;
      tree ut = unsigned_type_for (TREE_TYPE (tmp2));
      tmp3 = make_rename_temp (ut, "SR");
      tmp2 = fold_convert (ut, tmp2);
      tmp_seq = sra_build_assignment (tmp3, tmp2);
      gimple_seq_add_seq (&seq, tmp_seq);

      tmp2 = fold_build1 (BIT_NOT_EXPR, utype, mask);
      tmp2 = int_const_binop (RSHIFT_EXPR, tmp2, minshift, true);
      tmp2 = fold_convert (ut, tmp2);
      tmp2 = fold_build2 (BIT_AND_EXPR, ut, tmp3, tmp2);

      if (tmp3 != tmp2)
	{
	  tmp3 = make_rename_temp (ut, "SR");
	  tmp_seq = sra_build_assignment (tmp3, tmp2);
	  gimple_seq_add_seq (&seq, tmp_seq);
	}

      tmp2 = tmp3;
    }

  if (TYPE_MAIN_VARIANT (TREE_TYPE (tmp2)) != TYPE_MAIN_VARIANT (utype))
    {
      gimple_seq tmp_seq;
      tmp3 = make_rename_temp (utype, "SR");
      tmp2 = fold_convert (utype, tmp2);
      tmp_seq = sra_build_assignment (tmp3, tmp2);
      gimple_seq_add_seq (&seq, tmp_seq);
      tmp2 = tmp3;
    }

  if (!integer_zerop (minshift))
    {
      tmp3 = make_rename_temp (utype, "SR");
      stmt = gimple_build_assign (tmp3, fold_build2 (LSHIFT_EXPR, utype,
						     tmp2, minshift));
      gimple_seq_add_stmt (&seq, stmt);
      tmp2 = tmp3;
    }

  if (utype != TREE_TYPE (var))
    tmp3 = make_rename_temp (utype, "SR");
  else
    tmp3 = var;
  stmt = gimple_build_assign (tmp3, fold_build2 (BIT_IOR_EXPR, utype,
						 tmp, tmp2));
  gimple_seq_add_stmt (&seq, stmt);

  if (tmp3 != var)
    {
      if (TREE_TYPE (var) == type)
	stmt = gimple_build_assign (var, fold_convert (type, tmp3));
      else
	stmt = gimple_build_assign (var, fold_build1 (VIEW_CONVERT_EXPR,
						      TREE_TYPE (var), tmp3));
      gimple_seq_add_stmt (&seq, stmt);
    }

  return seq;
}

/* Expand an assignment of SRC to the scalarized representation of
   ELT.  If it is a field group, try to widen the assignment to cover
   the full variable.  */
static gimple_seq
sra_build_elt_assignment (struct sra_elt *elt, tree src)
{
  tree dst = elt->replacement;
  tree var, tmp, cst, cst2;
  gimple stmt;
  gimple_seq seq;

  if (TREE_CODE (dst) != BIT_FIELD_REF
      || !elt->in_bitfld_block)
    return sra_build_assignment (REPLDUP (dst), src);

  var = TREE_OPERAND (dst, 0);

  /* Try to widen the assignment to the entire variable.
     We need the source to be a BIT_FIELD_REF as well, such that, for
     BIT_FIELD_REF<d,sz,dp> = BIT_FIELD_REF<s,sz,sp>,
     by design, conditions are met such that we can turn it into
     d = BIT_FIELD_REF<s,dw,sp-dp>.  */
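  /* As a hypothetical example (ours, not from the original sources):
     with a 32-bit block variable d, BIT_FIELD_REF<d,8,16> =
     BIT_FIELD_REF<s,8,24> can be widened to d = BIT_FIELD_REF<s,32,8>,
     copying the whole block in one statement.  */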
  if (elt->in_bitfld_block == 2
      && TREE_CODE (src) == BIT_FIELD_REF)
    {
      tmp = src;
      cst = TYPE_SIZE (TREE_TYPE (var));
      cst2 = size_binop (MINUS_EXPR, TREE_OPERAND (src, 2),
			 TREE_OPERAND (dst, 2));

      src = TREE_OPERAND (src, 0);

      /* Avoid full-width bit-fields.  */
      if (integer_zerop (cst2)
	  && tree_int_cst_equal (cst, TYPE_SIZE (TREE_TYPE (src))))
	{
	  if (INTEGRAL_TYPE_P (TREE_TYPE (src))
	      && !TYPE_UNSIGNED (TREE_TYPE (src)))
	    src = fold_convert (unsigned_type_for (TREE_TYPE (src)), src);

	  /* If a single conversion won't do, we'll need a statement
	     list.  */
	  if (TYPE_MAIN_VARIANT (TREE_TYPE (var))
	      != TYPE_MAIN_VARIANT (TREE_TYPE (src)))
	    {
	      gimple_seq tmp_seq;
	      seq = NULL;

	      if (!INTEGRAL_TYPE_P (TREE_TYPE (src)))
		src = fold_build1 (VIEW_CONVERT_EXPR,
				   lang_hooks.types.type_for_size
				   (TREE_INT_CST_LOW
				    (TYPE_SIZE (TREE_TYPE (src))),
				    1), src);
	      gcc_assert (TYPE_UNSIGNED (TREE_TYPE (src)));

	      tmp = make_rename_temp (TREE_TYPE (src), "SR");
	      stmt = gimple_build_assign (tmp, src);
	      gimple_seq_add_stmt (&seq, stmt);

	      tmp_seq = sra_build_assignment (var,
					      fold_convert (TREE_TYPE (var),
							    tmp));
	      gimple_seq_add_seq (&seq, tmp_seq);

	      return seq;
	    }

	  src = fold_convert (TREE_TYPE (var), src);
	}
      else
	{
	  /* Widen the access to the whole variable, as described in the
	     comment above: d = BIT_FIELD_REF <s, dw, sp-dp>.  */
	  tmp = fold_build3 (BIT_FIELD_REF, TREE_TYPE (var), src, cst, cst2);
	  src = fold_convert (TREE_TYPE (var), tmp);
	}

      return sra_build_assignment (var, src);
    }

  return sra_build_bf_assignment (dst, src);
}

/* Generate a set of assignment statements in *SEQ_P to copy all
   instantiated elements under ELT to or from the equivalent structure
   rooted at EXPR.  COPY_OUT controls the direction of the copy, with
   true meaning to copy out of EXPR into ELT.  */
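/* For instance (illustrative only): for a candidate
   "struct { int x; int y; } p" with replacements p$x and p$y, a
   copy-out from EXPR emits p$x = EXPR.x; p$y = EXPR.y; and a copy-in
   emits the assignments in the opposite direction.  */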
static void
generate_copy_inout (struct sra_elt *elt, bool copy_out, tree expr,
		     gimple_seq *seq_p)
{
  struct sra_elt *c;
  gimple_seq tmp_seq;
  tree t;

  if (!copy_out && TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == COMPLEX_TYPE)
    {
      tree r, i;

      c = lookup_element (elt, integer_zero_node, NULL, NO_INSERT);
      r = c->replacement;
      c = lookup_element (elt, integer_one_node, NULL, NO_INSERT);
      i = c->replacement;

      t = build2 (COMPLEX_EXPR, elt->type, r, i);
      tmp_seq = sra_build_bf_assignment (expr, t);
      SSA_NAME_DEF_STMT (expr) = gimple_seq_last_stmt (tmp_seq);
      gimple_seq_add_seq (seq_p, tmp_seq);
    }
  else if (elt->replacement)
    {
      if (copy_out)
	tmp_seq = sra_build_elt_assignment (elt, expr);
      else
	tmp_seq = sra_build_bf_assignment (expr, REPLDUP (elt->replacement));
      gimple_seq_add_seq (seq_p, tmp_seq);
    }
  else
    FOR_EACH_ACTUAL_CHILD (c, elt)
      {
	t = generate_one_element_ref (c, unshare_expr (expr));
	generate_copy_inout (c, copy_out, t, seq_p);
      }
}

/* Generate a set of assignment statements in *SEQ_P to copy all instantiated
   elements under SRC to their counterparts under DST.  There must be a 1-1
   correspondence of instantiated elements.  */
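/* E.g. (example ours): for "a = b" with both aggregates fully
   instantiated, this emits a$x = b$x; a$y = b$y; and the caller then
   drops the original block copy.  */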
static void
generate_element_copy (struct sra_elt *dst, struct sra_elt *src,
		       gimple_seq *seq_p)
{
  struct sra_elt *dc, *sc;

  FOR_EACH_ACTUAL_CHILD (dc, dst)
    {
      sc = lookup_element (src, dc->element, NULL, NO_INSERT);
      if (!sc && dc->in_bitfld_block == 2)
	{
	  struct sra_elt *dcs;

	  FOR_EACH_ACTUAL_CHILD (dcs, dc)
	    {
	      sc = lookup_element (src, dcs->element, NULL, NO_INSERT);
	      gcc_assert (sc);
	      generate_element_copy (dcs, sc, seq_p);
	    }
	  continue;
	}

      /* If DST and SRC are structs with the same elements, but do not have
	 the same TYPE_MAIN_VARIANT, then lookup of DST FIELD_DECL in SRC
	 will fail.  Try harder by finding the corresponding FIELD_DECL
	 in SRC.  */
      if (!sc)
	{
	  tree f;

	  gcc_assert (useless_type_conversion_p (dst->type, src->type));
	  gcc_assert (TREE_CODE (dc->element) == FIELD_DECL);
	  for (f = TYPE_FIELDS (src->type); f ; f = TREE_CHAIN (f))
	    if (simple_cst_equal (DECL_FIELD_OFFSET (f),
				  DECL_FIELD_OFFSET (dc->element)) > 0
		&& simple_cst_equal (DECL_FIELD_BIT_OFFSET (f),
				     DECL_FIELD_BIT_OFFSET (dc->element)) > 0
		&& simple_cst_equal (DECL_SIZE (f),
				     DECL_SIZE (dc->element)) > 0
		&& (useless_type_conversion_p (TREE_TYPE (dc->element),
					       TREE_TYPE (f))
		    || (POINTER_TYPE_P (TREE_TYPE (dc->element))
			&& POINTER_TYPE_P (TREE_TYPE (f)))))
	      break;
	  gcc_assert (f != NULL_TREE);
	  sc = lookup_element (src, f, NULL, NO_INSERT);
	}

      generate_element_copy (dc, sc, seq_p);
    }

  if (dst->replacement)
    {
      gimple_seq tmp_seq;

      gcc_assert (src->replacement);

      tmp_seq = sra_build_elt_assignment (dst, REPLDUP (src->replacement));
      gimple_seq_add_seq (seq_p, tmp_seq);
    }
}

/* Generate a set of assignment statements in *SEQ_P to zero all instantiated
   elements under ELT.  In addition, do not assign to elements that have been
   marked VISITED but do reset the visited flag; this allows easy coordination
   with generate_element_init.  */
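/* E.g. (example ours): after "p = (struct s){ .x = 1 }" is expanded,
   the members not mentioned in the constructor get p$y = 0; p$z = 0;
   here, while p$x, already assigned and marked VISITED by
   generate_element_init, is skipped and has its flag reset.  */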
static void
generate_element_zero (struct sra_elt *elt, gimple_seq *seq_p)
{
  struct sra_elt *c;

  if (elt->visited)
    {
      elt->visited = false;
      return;
    }

  if (!elt->in_bitfld_block)
    FOR_EACH_ACTUAL_CHILD (c, elt)
      generate_element_zero (c, seq_p);

  if (elt->replacement)
    {
      tree t;
      gimple_seq tmp_seq;

      gcc_assert (elt->is_scalar);
      t = fold_convert (elt->type, integer_zero_node);

      tmp_seq = sra_build_elt_assignment (elt, t);
      gimple_seq_add_seq (seq_p, tmp_seq);
    }
}

/* Generate an assignment VAR = INIT, where INIT may need gimplification.
   Add the result to *SEQ_P.  */
static void
generate_one_element_init (struct sra_elt *elt, tree init, gimple_seq *seq_p)
{
  gimple_seq tmp_seq = sra_build_elt_assignment (elt, init);
  gimple_seq_add_seq (seq_p, tmp_seq);
}

/* Generate a set of assignment statements in *SEQ_P to set all instantiated
   elements under ELT with the contents of the initializer INIT.  In addition,
   mark all assigned elements VISITED; this allows easy coordination with
   generate_element_zero.  Return false if we found a case we couldn't
   handle.  */
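/* E.g. (example ours): initializing a candidate "_Complex float c" from
   COMPLEX_EXPR <r, i> generates c$real = r; c$imag = i; and marks both
   sub-elements VISITED.  */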
static bool
generate_element_init_1 (struct sra_elt *elt, tree init, gimple_seq *seq_p)
{
  bool result = true;
  enum tree_code init_code;
  struct sra_elt *sub;
  tree t;
  unsigned HOST_WIDE_INT idx;
  tree value, purpose;

  /* We can be passed DECL_INITIAL of a static variable.  It might have a
     conversion, which we strip off here.  */
  STRIP_USELESS_TYPE_CONVERSION (init);
  init_code = TREE_CODE (init);

  if (elt->is_scalar)
    {
      if (elt->replacement)
	{
	  generate_one_element_init (elt, init, seq_p);
	  elt->visited = true;
	}
      return result;
    }

  switch (init_code)
    {
    case COMPLEX_CST:
    case COMPLEX_EXPR:
      FOR_EACH_ACTUAL_CHILD (sub, elt)
	{
	  if (sub->element == integer_zero_node)
	    t = (init_code == COMPLEX_EXPR
		 ? TREE_OPERAND (init, 0) : TREE_REALPART (init));
	  else
	    t = (init_code == COMPLEX_EXPR
		 ? TREE_OPERAND (init, 1) : TREE_IMAGPART (init));
	  result &= generate_element_init_1 (sub, t, seq_p);
	}
      break;

    case CONSTRUCTOR:
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), idx, purpose, value)
	{
	  /* Array constructors are routinely created with NULL indices.  */
	  if (purpose == NULL_TREE)
	    {
	      result = false;
	      break;
	    }
	  if (TREE_CODE (purpose) == RANGE_EXPR)
	    {
	      tree lower = TREE_OPERAND (purpose, 0);
	      tree upper = TREE_OPERAND (purpose, 1);

	      while (1)
		{
		  sub = lookup_element (elt, lower, NULL, NO_INSERT);
		  if (sub != NULL)
		    result &= generate_element_init_1 (sub, value, seq_p);
		  if (tree_int_cst_equal (lower, upper))
		    break;
		  lower = int_const_binop (PLUS_EXPR, lower,
					   integer_one_node, true);
		}
	    }
	  else
	    {
	      sub = lookup_element (elt, purpose, NULL, NO_INSERT);
	      if (sub != NULL)
		result &= generate_element_init_1 (sub, value, seq_p);
	    }
	}
      break;

    default:
      elt->visited = true;
      result = false;
    }

  return result;
}

/* A wrapper function for generate_element_init_1 that handles cleanup after
   gimplification.  */

static bool
generate_element_init (struct sra_elt *elt, tree init, gimple_seq *seq_p)
{
  bool ret;
  struct gimplify_ctx gctx;

  push_gimplify_context (&gctx);
  ret = generate_element_init_1 (elt, init, seq_p);
  pop_gimplify_context (NULL);

  /* The replacement can expose previously unreferenced variables.  */
  if (ret && *seq_p)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start (*seq_p); !gsi_end_p (i); gsi_next (&i))
	find_new_referenced_vars (gsi_stmt (i));
    }

  return ret;
}

/* Insert a gimple_seq SEQ on all the outgoing edges out of BB.  Note that
   if BB has more than one edge, SEQ will be replicated for each edge.
   Also, abnormal edges will be ignored.  */
static void
insert_edge_copies_seq (gimple_seq seq, basic_block bb)
{
  edge e;
  edge_iterator ei;
  unsigned n_copies = -1;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_ABNORMAL))
      n_copies++;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_ABNORMAL))
      gsi_insert_seq_on_edge (e, n_copies-- > 0 ? gimple_seq_copy (seq) : seq);
}

/* Helper function to insert SEQ before GSI, and set up line number info.  */

static void
sra_insert_before (gimple_stmt_iterator *gsi, gimple_seq seq)
{
  gimple stmt = gsi_stmt (*gsi);

  if (gimple_has_location (stmt))
    annotate_all_with_location (seq, gimple_location (stmt));
  gsi_insert_seq_before (gsi, seq, GSI_SAME_STMT);
}

/* Similarly, but insert after GSI.  Handles insertion onto edges as well.  */

static void
sra_insert_after (gimple_stmt_iterator *gsi, gimple_seq seq)
{
  gimple stmt = gsi_stmt (*gsi);

  if (gimple_has_location (stmt))
    annotate_all_with_location (seq, gimple_location (stmt));

  if (stmt_ends_bb_p (stmt))
    insert_edge_copies_seq (seq, gsi_bb (*gsi));
  else
    gsi_insert_seq_after (gsi, seq, GSI_SAME_STMT);
}

/* Similarly, but replace the statement at GSI.  */

static void
sra_replace (gimple_stmt_iterator *gsi, gimple_seq seq)
{
  sra_insert_before (gsi, seq);
  gsi_remove (gsi, false);
  if (gsi_end_p (*gsi))
    *gsi = gsi_last (gsi_seq (*gsi));
  else
    gsi_prev (gsi);
}

/* Data structure that bitfield_overlaps_p fills in with information
   about the element passed in and how much of it overlaps with the
   bit-range passed to it.  */

struct bitfield_overlap_info
{
  /* The bit-length of an element.  */
  tree field_len;

  /* The bit-position of the element in its parent.  */
  tree field_pos;

  /* The number of bits of the element that overlap with the incoming
     bit range.  */
  tree overlap_len;

  /* The first bit of the element that overlaps with the incoming bit
     range.  */
  tree overlap_pos;
};

/* Return true if a BIT_FIELD_REF<(FLD->parent), BLEN, BPOS>
   expression (referenced as BF below) accesses any of the bits in FLD,
   false if it doesn't.  If DATA is non-null, its field_len and
   field_pos are filled in such that BIT_FIELD_REF<(FLD->parent),
   field_len, field_pos> (referenced as BFLD below) represents the
   entire field FLD->element, and BIT_FIELD_REF<BFLD, overlap_len,
   overlap_pos> represents the portion of the entire field that
   overlaps with BF.  */
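/* A worked example (ours, not from the original sources): for a 16-bit
   field FLD at bit 8 of its parent (field_pos 8, field_len 16) and an
   incoming range with BPOS 16, BLEN 16, the ranges [8,24) and [16,32)
   intersect, so this returns true with overlap_pos 8 and overlap_len 8:
   bits 8..15 of the field are covered by BF.  */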
static bool
bitfield_overlaps_p (tree blen, tree bpos, struct sra_elt *fld,
		     struct bitfield_overlap_info *data)
{
  tree flen, fpos;
  bool ret;

  if (TREE_CODE (fld->element) == FIELD_DECL)
    {
      flen = fold_convert (bitsizetype, DECL_SIZE (fld->element));
      fpos = fold_convert (bitsizetype, DECL_FIELD_OFFSET (fld->element));
      fpos = size_binop (MULT_EXPR, fpos, bitsize_int (BITS_PER_UNIT));
      fpos = size_binop (PLUS_EXPR, fpos, DECL_FIELD_BIT_OFFSET (fld->element));
    }
  else if (TREE_CODE (fld->element) == BIT_FIELD_REF)
    {
      flen = fold_convert (bitsizetype, TREE_OPERAND (fld->element, 1));
      fpos = fold_convert (bitsizetype, TREE_OPERAND (fld->element, 2));
    }
  else if (TREE_CODE (fld->element) == INTEGER_CST)
    {
      tree domain_type = TYPE_DOMAIN (TREE_TYPE (fld->parent->element));
      flen = fold_convert (bitsizetype, TYPE_SIZE (fld->type));
      fpos = fold_convert (bitsizetype, fld->element);
      if (domain_type && TYPE_MIN_VALUE (domain_type))
	fpos = size_binop (MINUS_EXPR, fpos,
			   fold_convert (bitsizetype,
					 TYPE_MIN_VALUE (domain_type)));
      fpos = size_binop (MULT_EXPR, flen, fpos);
    }
  else
    gcc_unreachable ();

  gcc_assert (host_integerp (blen, 1)
	      && host_integerp (bpos, 1)
	      && host_integerp (flen, 1)
	      && host_integerp (fpos, 1));

  ret = ((!tree_int_cst_lt (fpos, bpos)
	  && tree_int_cst_lt (size_binop (MINUS_EXPR, fpos, bpos),
			      blen))
	 || (!tree_int_cst_lt (bpos, fpos)
	     && tree_int_cst_lt (size_binop (MINUS_EXPR, bpos, fpos),
				 flen)));

  if (!ret)
    return ret;

  if (data)
    {
      tree bend, fend;

      data->field_len = flen;
      data->field_pos = fpos;

      fend = size_binop (PLUS_EXPR, fpos, flen);
      bend = size_binop (PLUS_EXPR, bpos, blen);

      if (tree_int_cst_lt (bend, fend))
	data->overlap_len = size_binop (MINUS_EXPR, bend, fpos);
      else
	data->overlap_len = NULL;

      if (tree_int_cst_lt (fpos, bpos))
	{
	  data->overlap_pos = size_binop (MINUS_EXPR, bpos, fpos);
	  data->overlap_len = size_binop (MINUS_EXPR,
					  data->overlap_len
					  ? data->overlap_len
					  : data->field_len,
					  data->overlap_pos);
	}
      else
	data->overlap_pos = NULL;
    }

  return ret;
}

/* Add to *SEQ_P a sequence of statements that copies BLEN bits between
   VAR and the scalarized elements of ELT, starting at bit VPOS of VAR
   and at bit BPOS of ELT.  The direction of the copy is given by
   TO_VAR.  */
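/* E.g. (example ours): copying 16 bits at BPOS 8 out of a structure
   whose instantiated fields lie at bits 8..15 and 16..31 emits one
   sra_build_bf_assignment per overlapping field, each moving only the
   overlapping sub-range into the right bits of VAR.  */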
static void
sra_explode_bitfield_assignment (tree var, tree vpos, bool to_var,
				 gimple_seq *seq_p, tree blen, tree bpos,
				 struct sra_elt *elt)
{
  struct sra_elt *fld;
  struct bitfield_overlap_info flp;

  FOR_EACH_ACTUAL_CHILD (fld, elt)
    {
      tree flen, fpos;

      if (!bitfield_overlaps_p (blen, bpos, fld, &flp))
	continue;

      flen = flp.overlap_len ? flp.overlap_len : flp.field_len;
      fpos = flp.overlap_pos ? flp.overlap_pos : bitsize_int (0);

      if (fld->replacement)
	{
	  tree infld, invar, type;
	  gimple_seq st;

	  infld = fld->replacement;

	  type = unsigned_type_for (TREE_TYPE (infld));
	  if (TYPE_PRECISION (type) != TREE_INT_CST_LOW (flen))
	    type = build_nonstandard_integer_type (TREE_INT_CST_LOW (flen), 1);

	  if (TREE_CODE (infld) == BIT_FIELD_REF)
	    {
	      fpos = size_binop (PLUS_EXPR, fpos, TREE_OPERAND (infld, 2));
	      infld = TREE_OPERAND (infld, 0);
	    }
	  else if (BYTES_BIG_ENDIAN && DECL_P (fld->element)
		   && !tree_int_cst_equal (TYPE_SIZE (TREE_TYPE (infld)),
					   DECL_SIZE (fld->element)))
	    {
	      fpos = size_binop (PLUS_EXPR, fpos,
				 TYPE_SIZE (TREE_TYPE (infld)));
	      fpos = size_binop (MINUS_EXPR, fpos,
				 DECL_SIZE (fld->element));
	    }

	  infld = fold_build3 (BIT_FIELD_REF, type, infld, flen, fpos);

	  invar = size_binop (MINUS_EXPR, flp.field_pos, bpos);
	  if (flp.overlap_pos)
	    invar = size_binop (PLUS_EXPR, invar, flp.overlap_pos);
	  invar = size_binop (PLUS_EXPR, invar, vpos);

	  invar = fold_build3 (BIT_FIELD_REF, type, var, flen, invar);

	  if (to_var)
	    st = sra_build_bf_assignment (invar, infld);
	  else
	    st = sra_build_bf_assignment (infld, invar);

	  gimple_seq_add_seq (seq_p, st);
	}
      else
	{
	  tree sub = size_binop (MINUS_EXPR, flp.field_pos, bpos);
	  sub = size_binop (PLUS_EXPR, vpos, sub);
	  if (flp.overlap_pos)
	    sub = size_binop (PLUS_EXPR, sub, flp.overlap_pos);

	  sra_explode_bitfield_assignment (var, sub, to_var, seq_p,
					   flen, fpos, fld);
	}
    }
}

/* Add to *SEQ_BEFORE_P statements that copy scalarized members of ELT
   that overlap with BIT_FIELD_REF<(ELT->element), BLEN, BPOS> back
   into the full variable, and to *SEQ_AFTER_P, if non-NULL, statements
   that copy the (presumably modified) overlapping portions of the
   full variable back to the scalarized variables.  */
static void
sra_sync_for_bitfield_assignment (gimple_seq *seq_before_p,
				  gimple_seq *seq_after_p,
				  tree blen, tree bpos,
				  struct sra_elt *elt)
{
  struct sra_elt *fld;
  struct bitfield_overlap_info flp;

  FOR_EACH_ACTUAL_CHILD (fld, elt)
    if (bitfield_overlaps_p (blen, bpos, fld, &flp))
      {
	if (fld->replacement || (!flp.overlap_len && !flp.overlap_pos))
	  {
	    generate_copy_inout (fld, false, generate_element_ref (fld),
				 seq_before_p);
	    mark_no_warning (fld);
	    if (seq_after_p)
	      generate_copy_inout (fld, true, generate_element_ref (fld),
				   seq_after_p);
	  }
	else
	  {
	    tree flen = flp.overlap_len ? flp.overlap_len : flp.field_len;
	    tree fpos = flp.overlap_pos ? flp.overlap_pos : bitsize_int (0);

	    sra_sync_for_bitfield_assignment (seq_before_p, seq_after_p,
					      flen, fpos, fld);
	  }
      }
}

/* Scalarize a USE.  To recap, this is either a simple reference to ELT,
   if elt is scalar, or some occurrence of ELT that requires a complete
   aggregate.  IS_OUTPUT is true if ELT is being modified.  */
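/* E.g. (example ours): a use of the whole aggregate "p" as a call
   argument forces the instantiated p$x, p$y to be stored back into p
   just before the call; a scalar use such as "p.x" is simply rewritten
   to p$x in place.  */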
static void
scalarize_use (struct sra_elt *elt, tree *expr_p, gimple_stmt_iterator *gsi,
	       bool is_output, bool use_all)
{
  gimple stmt = gsi_stmt (*gsi);
  tree bfexpr;

  if (elt->replacement)
    {
      tree replacement = elt->replacement;

      /* If we have a replacement, then updating the reference is as
	 simple as modifying the existing statement in place.  */
      if (is_output
	  && TREE_CODE (elt->replacement) == BIT_FIELD_REF
	  && is_gimple_reg (TREE_OPERAND (elt->replacement, 0))
	  && is_gimple_assign (stmt)
	  && gimple_assign_lhs_ptr (stmt) == expr_p)
	{
	  gimple_seq newseq;
	  /* RHS must be a single operand.  */
	  gcc_assert (gimple_assign_single_p (stmt));
	  newseq = sra_build_elt_assignment (elt, gimple_assign_rhs1 (stmt));
	  sra_replace (gsi, newseq);
	  return;
	}
      else if (!is_output
	       && TREE_CODE (elt->replacement) == BIT_FIELD_REF
	       && is_gimple_assign (stmt)
	       && gimple_assign_rhs1_ptr (stmt) == expr_p)
	{
	  tree tmp = make_rename_temp
	    (TREE_TYPE (gimple_assign_lhs (stmt)), "SR");
	  gimple_seq newseq = sra_build_assignment (tmp, REPLDUP (elt->replacement));

	  sra_insert_before (gsi, newseq);
	  replacement = tmp;
	}
      if (is_output)
	mark_all_v_defs_stmt (stmt);
      *expr_p = REPLDUP (replacement);
      update_stmt (stmt);
    }
  else if (use_all && is_output
	   && is_gimple_assign (stmt)
	   && TREE_CODE (bfexpr
			 = gimple_assign_lhs (stmt)) == BIT_FIELD_REF
	   && &TREE_OPERAND (bfexpr, 0) == expr_p
	   && INTEGRAL_TYPE_P (TREE_TYPE (bfexpr))
	   && TREE_CODE (TREE_TYPE (*expr_p)) == RECORD_TYPE)
    {
      gimple_seq seq_before = NULL;
      gimple_seq seq_after = NULL;
      tree blen = fold_convert (bitsizetype, TREE_OPERAND (bfexpr, 1));
      tree bpos = fold_convert (bitsizetype, TREE_OPERAND (bfexpr, 2));
      bool update = false;

      if (!elt->use_block_copy)
	{
	  tree type = TREE_TYPE (bfexpr);
	  tree var = make_rename_temp (type, "SR"), tmp, vpos;
	  gimple st;

	  gimple_assign_set_lhs (stmt, var);
	  update = true;

	  if (!TYPE_UNSIGNED (type))
	    {
	      type = unsigned_type_for (type);
	      tmp = make_rename_temp (type, "SR");
	      st = gimple_build_assign (tmp, fold_convert (type, var));
	      gimple_seq_add_stmt (&seq_after, st);
	      var = tmp;
	    }

	  /* If VAR is wider than BLEN bits, it is padded at the
	     most-significant end.  We want to set VPOS such that
	     <BIT_FIELD_REF VAR BLEN VPOS> would refer to the
	     least-significant BLEN bits of VAR.  */
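	  /* E.g. (example ours): for a 32-bit VAR and BLEN 8, VPOS is
	     24 on a big-endian target and 0 on a little-endian one.  */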
	  if (BYTES_BIG_ENDIAN)
	    vpos = size_binop (MINUS_EXPR, TYPE_SIZE (type), blen);
	  else
	    vpos = bitsize_int (0);
	  sra_explode_bitfield_assignment
	    (var, vpos, false, &seq_after, blen, bpos, elt);
	}
      else
	sra_sync_for_bitfield_assignment
	  (&seq_before, &seq_after, blen, bpos, elt);

      if (seq_before)
	{
	  mark_all_v_defs_seq (seq_before);
	  sra_insert_before (gsi, seq_before);
	}
      if (seq_after)
	{
	  mark_all_v_defs_seq (seq_after);
	  sra_insert_after (gsi, seq_after);
	}

      if (update)
	update_stmt (stmt);
    }
  else if (use_all && !is_output
	   && is_gimple_assign (stmt)
	   && TREE_CODE (bfexpr
			 = gimple_assign_rhs1 (stmt)) == BIT_FIELD_REF
	   && &TREE_OPERAND (gimple_assign_rhs1 (stmt), 0) == expr_p
	   && INTEGRAL_TYPE_P (TREE_TYPE (bfexpr))
	   && TREE_CODE (TREE_TYPE (*expr_p)) == RECORD_TYPE)
    {
      gimple_seq seq = NULL;
      tree blen = fold_convert (bitsizetype, TREE_OPERAND (bfexpr, 1));
      tree bpos = fold_convert (bitsizetype, TREE_OPERAND (bfexpr, 2));
      bool update = false;

      if (!elt->use_block_copy)
	{
	  tree type = TREE_TYPE (bfexpr);
	  tree var = make_rename_temp (type, "SR"), tmp, vpos;
	  gimple st = NULL;

	  gimple_assign_set_rhs1 (stmt, var);
	  update = true;

	  if (!TYPE_UNSIGNED (type))
	    {
	      type = unsigned_type_for (type);
	      tmp = make_rename_temp (type, "SR");
	      st = gimple_build_assign (var,
					fold_convert (TREE_TYPE (var), tmp));
	      var = tmp;
	    }

	  gimple_seq_add_stmt (&seq,
			       gimple_build_assign
				 (var, build_int_cst_wide (type, 0, 0)));

	  /* If VAR is wider than BLEN bits, it is padded at the
	     most-significant end.  We want to set VPOS such that
	     <BIT_FIELD_REF VAR BLEN VPOS> would refer to the
	     least-significant BLEN bits of VAR.  */
	  if (BYTES_BIG_ENDIAN)
	    vpos = size_binop (MINUS_EXPR, TYPE_SIZE (type), blen);
	  else
	    vpos = bitsize_int (0);
	  sra_explode_bitfield_assignment
	    (var, vpos, true, &seq, blen, bpos, elt);

	  if (st)
	    gimple_seq_add_stmt (&seq, st);
	}
      else
	sra_sync_for_bitfield_assignment
	  (&seq, NULL, blen, bpos, elt);

      if (seq)
	{
	  mark_all_v_defs_seq (seq);
	  sra_insert_before (gsi, seq);
	}

      if (update)
	update_stmt (stmt);
    }
  else
    {
      gimple_seq seq = NULL;

      /* Otherwise we need some copies.  If ELT is being read, then we
	 want to store all (modified) sub-elements back into the
	 structure before the reference takes place.  If ELT is being
	 written, then we want to load the changed values back into
	 our shadow variables.  */
      /* ??? We don't check modified for reads, we just always write all of
	 the values.  We should be able to record the SSA number of the VOP
	 for which the values were last read.  If that number matches the
	 SSA number of the VOP in the current statement, then we needn't
	 emit an assignment.  This would also eliminate double writes when
	 a structure is passed as more than one argument to a function call.
	 This optimization would be most effective if sra_walk_function
	 processed the blocks in dominator order.  */

      generate_copy_inout (elt, is_output, generate_element_ref (elt), &seq);
      if (seq == NULL)
	return;
      mark_all_v_defs_seq (seq);
      if (is_output)
	sra_insert_after (gsi, seq);
      else
	{
	  sra_insert_before (gsi, seq);
	  if (use_all)
	    mark_no_warning (elt);
	}
    }
}

/* Scalarize a COPY.  To recap, this is an assignment statement between
   two scalarizable references, LHS_ELT and RHS_ELT.  */
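/* E.g. (example ours): "a = b" with both operands fully scalarized is
   replaced outright by the element-wise assignments a$x = b$x;
   a$y = b$y; if either side needs a block copy, the aggregate
   assignment is kept and the scalars are synced around it instead.  */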
static void
scalarize_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
		gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  gimple stmt;

  if (lhs_elt->replacement && rhs_elt->replacement)
    {
      /* If we have two scalar operands, modify the existing statement.  */
      stmt = gsi_stmt (*gsi);

      /* See the commentary in sra_walk_function concerning
	 RETURN_EXPR, and why we should never see one here.  */
      gcc_assert (is_gimple_assign (stmt));
      gcc_assert (gimple_assign_copy_p (stmt));

      gimple_assign_set_lhs (stmt, lhs_elt->replacement);
      gimple_assign_set_rhs1 (stmt, REPLDUP (rhs_elt->replacement));
      update_stmt (stmt);
    }
  else if (lhs_elt->use_block_copy || rhs_elt->use_block_copy)
    {
      /* If either side requires a block copy, then sync the RHS back
	 to the original structure, leave the original assignment
	 statement (which will perform the block copy), then load the
	 LHS values out of its now-updated original structure.  */
      /* ??? Could perform a modified pair-wise element copy.  That
	 would at least allow those elements that are instantiated in
	 both structures to be optimized well.  */

      seq = NULL;
      generate_copy_inout (rhs_elt, false,
			   generate_element_ref (rhs_elt), &seq);
      if (seq)
	{
	  mark_all_v_defs_seq (seq);
	  sra_insert_before (gsi, seq);
	}

      seq = NULL;
      generate_copy_inout (lhs_elt, true,
			   generate_element_ref (lhs_elt), &seq);
      if (seq)
	{
	  mark_all_v_defs_seq (seq);
	  sra_insert_after (gsi, seq);
	}
    }
  else
    {
      /* Otherwise both sides must be fully instantiated.  In which
	 case perform pair-wise element assignments and replace the
	 original block copy statement.  */

      stmt = gsi_stmt (*gsi);
      mark_all_v_defs_stmt (stmt);

      seq = NULL;
      generate_element_copy (lhs_elt, rhs_elt, &seq);
      gcc_assert (seq);
      mark_all_v_defs_seq (seq);
      sra_replace (gsi, seq);
    }
}

/* Scalarize an INIT.  To recap, this is an assignment to a scalarizable
   reference from some form of constructor: CONSTRUCTOR, COMPLEX_CST or
   COMPLEX_EXPR.  If RHS is NULL, it should be treated as an empty
   CONSTRUCTOR.  */
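/* E.g. (example ours): "p = (struct s){ 1, 2 }" with p fully
   instantiated is replaced outright by p$x = 1; p$y = 2; if some part
   of the initializer cannot be converted, the aggregate store stays in
   place and the scalar copies are emitted after it.  */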
static void
scalarize_init (struct sra_elt *lhs_elt, tree rhs, gimple_stmt_iterator *gsi)
{
  bool result = true;
  gimple_seq seq = NULL, init_seq = NULL;

  /* Generate initialization statements for all members extant in the RHS.  */
  if (rhs)
    {
      /* Unshare the expression just in case this is from a decl's initial.  */
      rhs = unshare_expr (rhs);
      result = generate_element_init (lhs_elt, rhs, &init_seq);
    }

  if (!result)
    {
      /* If we failed to convert the entire initializer, then we must
	 leave the structure assignment in place and must load values
	 from the structure into the slots for which we did not find
	 constants.  The easiest way to do this is to generate a complete
	 copy-out, and then follow that with the constant assignments
	 that we were able to build.  DCE will clean things up.  */
      gimple_seq seq0 = NULL;
      generate_copy_inout (lhs_elt, true, generate_element_ref (lhs_elt),
			   &seq0);
      gimple_seq_add_seq (&seq0, seq);
      seq = seq0;
    }
  else
    {
      /* CONSTRUCTOR is defined such that any member not mentioned is assigned
	 a zero value.  Initialize the rest of the instantiated elements.  */
      generate_element_zero (lhs_elt, &seq);
      gimple_seq_add_seq (&seq, init_seq);
    }

  if (lhs_elt->use_block_copy || !result)
    {
      /* Since LHS is not fully instantiated, we must leave the structure
	 assignment in place.  Treating this case differently from a USE
	 exposes constants to later optimizations.  */
      if (seq)
	{
	  mark_all_v_defs_seq (seq);
	  sra_insert_after (gsi, seq);
	}
    }
  else
    {
      /* The LHS is fully instantiated.  The list of initializations
	 replaces the original structure assignment.  */
      gcc_assert (seq);
      mark_all_v_defs_stmt (gsi_stmt (*gsi));
      mark_all_v_defs_seq (seq);
      sra_replace (gsi, seq);
    }
}

/* A subroutine of scalarize_ldst called via walk_tree.  Set TREE_THIS_NOTRAP
   on all INDIRECT_REFs.  */

static tree
mark_notrap (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      TREE_THIS_NOTRAP (t) = 1;
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;

  return NULL;
}

/* Scalarize a LDST.  To recap, this is an assignment between one scalarizable
   reference ELT and one non-scalarizable reference OTHER.  IS_OUTPUT is true
   if ELT is on the left-hand side.  */
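/* E.g. (example ours): for "p = *q" with p fully instantiated (and *q
   not scalarizable), the block copy is replaced by p$x = q->x;
   p$y = q->y; loading each element directly from memory.  */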
static void
scalarize_ldst (struct sra_elt *elt, tree other,
		gimple_stmt_iterator *gsi, bool is_output)
{
  /* Shouldn't have gotten called for a scalar.  */
  gcc_assert (!elt->replacement);

  if (elt->use_block_copy)
    {
      /* Since ELT is not fully instantiated, we have to leave the
	 block copy in place.  Treat this as a USE.  */
      scalarize_use (elt, NULL, gsi, is_output, false);
    }
  else
    {
      /* The interesting case is when ELT is fully instantiated.  In this
	 case we can have each element stored/loaded directly to/from the
	 corresponding slot in OTHER.  This avoids a block copy.  */

      gimple_seq seq = NULL;
      gimple stmt = gsi_stmt (*gsi);

      mark_all_v_defs_stmt (stmt);
      generate_copy_inout (elt, is_output, other, &seq);
      gcc_assert (seq);
      mark_all_v_defs_seq (seq);

      /* Preserve EH semantics.  */
      if (stmt_ends_bb_p (stmt))
	{
	  gimple_stmt_iterator si;
	  gimple first;
	  gimple_seq blist = NULL;
	  bool thr = stmt_could_throw_p (stmt);

	  /* If the last statement of this BB created an EH edge
	     before scalarization, we have to locate the first
	     statement that can throw in the new statement list and
	     use that as the last statement of this BB, such that EH
	     semantics is preserved.  All statements up to this one
	     are added to the same BB.  All other statements in the
	     list will be added to normal outgoing edges of the same
	     BB.  If they access any memory, it's the same memory, so
	     we can assume they won't throw.  */
	  si = gsi_start (seq);
	  for (first = gsi_stmt (si);
	       thr && !gsi_end_p (si) && !stmt_could_throw_p (first);
	       first = gsi_stmt (si))
	    {
	      gsi_remove (&si, false);
	      gimple_seq_add_stmt (&blist, first);
	    }

	  /* Extract the first remaining statement from SEQ; this is
	     the EH statement if there is one.  */
	  gsi_remove (&si, false);

	  if (blist)
	    sra_insert_before (gsi, blist);

	  /* Replace the old statement with this new representative.  */
	  gsi_replace (gsi, first, true);

	  if (!gsi_end_p (si))
	    {
	      /* If any reference would trap, then they all would.  And more
		 to the point, the first would.  Therefore none of the rest
		 will trap since the first didn't.  Indicate this by
		 iterating over the remaining statements and setting
		 TREE_THIS_NOTRAP in all INDIRECT_REFs.  */
	      do
		{
		  walk_gimple_stmt (&si, NULL, mark_notrap, NULL);
		  gsi_next (&si);
		}
	      while (!gsi_end_p (si));

	      insert_edge_copies_seq (seq, gsi_bb (*gsi));
	    }
	}
      else
	sra_replace (gsi, seq);
    }
}

/* Generate initializations for all scalarizable parameters.  */

static void
scalarize_parms (void)
{
  gimple_seq seq = NULL;
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (needs_copy_in, 0, i, bi)
    {
      tree var = referenced_var (i);
      struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
      generate_copy_inout (elt, true, var, &seq);
    }

  if (seq)
    {
      insert_edge_copies_seq (seq, ENTRY_BLOCK_PTR);
      mark_all_v_defs_seq (seq);
    }
}

/* Entry point to phase 4.  Update the function to match replacements.  */

static void
scalarize_function (void)
{
  static const struct sra_walk_fns fns = {
    scalarize_use, scalarize_copy, scalarize_init, scalarize_ldst, false
  };

  sra_walk_function (&fns);
  gsi_commit_edge_inserts ();
}

/* Debug helper function.  Print ELT in a nice human-readable format.  */

static void
dump_sra_elt_name (FILE *f, struct sra_elt *elt)
{
  if (elt->parent && TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
    {
      fputs (elt->element == integer_zero_node ? "__real__ " : "__imag__ ", f);
      dump_sra_elt_name (f, elt->parent);
    }
  else
    {
      if (elt->parent)
	dump_sra_elt_name (f, elt->parent);
      if (DECL_P (elt->element))
	{
	  if (TREE_CODE (elt->element) == FIELD_DECL)
	    fputc ('.', f);
	  print_generic_expr (f, elt->element, dump_flags);
	}
      else if (TREE_CODE (elt->element) == BIT_FIELD_REF)
	fprintf (f, "$B" HOST_WIDE_INT_PRINT_DEC "F" HOST_WIDE_INT_PRINT_DEC,
		 tree_low_cst (TREE_OPERAND (elt->element, 2), 1),
		 tree_low_cst (TREE_OPERAND (elt->element, 1), 1));
      else if (TREE_CODE (elt->element) == RANGE_EXPR)
	fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC ".." HOST_WIDE_INT_PRINT_DEC "]",
		 TREE_INT_CST_LOW (TREE_OPERAND (elt->element, 0)),
		 TREE_INT_CST_LOW (TREE_OPERAND (elt->element, 1)));
      else
	fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC "]",
		 TREE_INT_CST_LOW (elt->element));
    }
}

/* Likewise, but callable from the debugger.  */

void
debug_sra_elt_name (struct sra_elt *elt)
{
  dump_sra_elt_name (stderr, elt);
  fputc ('\n', stderr);
}

static void
sra_init_cache (void)
{
  if (sra_type_decomp_cache)
    return;

  sra_type_decomp_cache = BITMAP_ALLOC (NULL);
  sra_type_inst_cache = BITMAP_ALLOC (NULL);
}

/* Main entry point.  */

static unsigned int
tree_sra (void)
{
  /* Initialize local variables.  */
  todoflags = 0;
  gcc_obstack_init (&sra_obstack);
  sra_candidates = BITMAP_ALLOC (NULL);
  needs_copy_in = BITMAP_ALLOC (NULL);
  sra_init_cache ();
  sra_map = htab_create (101, sra_elt_hash, sra_elt_eq, NULL);

  /* Scan.  If we find anything, instantiate and scalarize.  */
  if (find_candidates_for_sra ())
    {
      scan_function ();
      decide_instantiations ();
      scalarize_function ();
      if (!bitmap_empty_p (sra_candidates))
	todoflags |= TODO_rebuild_alias;
    }

  /* Free allocated memory.  */
  htab_delete (sra_map);
  sra_map = NULL;
  BITMAP_FREE (sra_candidates);
  BITMAP_FREE (needs_copy_in);
  BITMAP_FREE (sra_type_decomp_cache);
  BITMAP_FREE (sra_type_inst_cache);
  obstack_free (&sra_obstack, NULL);
  return todoflags;
}

static unsigned int
tree_sra_early (void)
{
  unsigned int ret;

  early_sra = true;
  ret = tree_sra ();
  early_sra = false;

  return ret & ~TODO_rebuild_alias;
}

static bool
gate_sra (void)
{
  return flag_tree_sra != 0;
}

struct gimple_opt_pass pass_sra_early =
{
 {
  GIMPLE_PASS,
  "esra",				/* name */
  gate_sra,				/* gate */
  tree_sra_early,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_SRA,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
  | TODO_update_ssa
  | TODO_ggc_collect
  | TODO_verify_ssa			/* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_sra =
{
 {
  GIMPLE_PASS,
  "sra",				/* name */
  gate_sra,				/* gate */
  tree_sra,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_SRA,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
  | TODO_update_ssa
  | TODO_ggc_collect
  | TODO_verify_ssa			/* todo_flags_finish */
 }
};