1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2 references into scalar references, exposing them to the scalar
4 Copyright (C) 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
5 Contributed by Martin Jambor <mjambor@suse.cz>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* This file implements Scalar Replacement of Aggregates (SRA). SRA is run
24 twice, once in the early stages of compilation (early SRA) and once in the
25 late stages (late SRA). The aim of both is to turn references to scalar
26 parts of aggregates into uses of independent scalar variables.
28 The two passes are nearly identical; the only difference is that early SRA
29 does not scalarize unions which are used as the result in a GIMPLE_RETURN
30 statement because together with inlining this can lead to weird type
33 Both passes operate in four stages:
35 1. The declarations that have properties which make them candidates for
36 scalarization are identified in function find_var_candidates(). The
37 candidates are stored in candidate_bitmap.
39 2. The function body is scanned. In the process, declarations which are
40 used in a manner that prevents their scalarization are removed from the
41 candidate bitmap. More importantly, for every access into an aggregate,
42 an access structure (struct access) is created by create_access() and
43 stored in a vector associated with the aggregate. Among other
44 information, the aggregate declaration, the offset and size of the access
45 and its type are stored in the structure.
47 On a related note, assign_link structures are created for every assign
48 statement between candidate aggregates and attached to the related
51 3. The vectors of accesses are analyzed. They are first sorted according to
52 their offset and size and then scanned for partially overlapping accesses
53 (i.e. those which overlap but one is not entirely within another). Such
54 an access disqualifies the whole aggregate from being scalarized.
56 If there is no such inhibiting overlap, a representative access structure
57 is chosen for every unique combination of offset and size. Afterwards,
58 the pass builds a set of trees from these structures, in which children
59 of an access are within their parent (in terms of offset and size).
61 Then accesses are propagated whenever possible (i.e. in cases when it
62 does not create a partially overlapping access) across assign_links from
63 the right hand side to the left hand side.
65 Then the set of trees for each declaration is traversed again and those
66 accesses which should be replaced by a scalar are identified.
68 4. The function is traversed again, and for every reference into an
69 aggregate that has some component which is about to be scalarized,
70 statements are amended and new statements are created as necessary.
71 Finally, if a parameter got scalarized, the scalar replacements are
72 initialized with values from respective parameter aggregates. */
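/* For illustration only (this example is not part of the pass and the names
   in it are made up), intraprocedural SRA aims at transformations of the
   following kind. Given

       struct pair { int a; int b; };

       int
       sum (struct pair p)
       {
         struct pair q;
         q = p;
         return q.a + q.b;
       }

   the aggregate temporary q can be replaced by two independent scalars,
   roughly

       int q$a, q$b;
       q$a = p.a;
       q$b = p.b;
       return q$a + q$b;

   after which q itself is no longer referenced and can be removed. */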
76 #include "coretypes.h"
77 #include "alloc-pool.h"
82 #include "tree-flow.h"
84 #include "tree-pretty-print.h"
85 #include "statistics.h"
86 #include "tree-dump.h"
92 #include "tree-inline.h"
93 #include "gimple-pretty-print.h"
94 #include "ipa-inline.h"
96 /* Enumeration of all aggregate reductions we can do. */
97 enum sra_mode { SRA_MODE_EARLY_IPA, /* early call regularization */
98 SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
99 SRA_MODE_INTRA }; /* late intraprocedural SRA */
101 /* Global variable describing which aggregate reduction we are performing at
103 static enum sra_mode sra_mode;
107 /* ACCESS represents each access to an aggregate variable (as a whole or a
108 part). It can also represent a group of accesses that refer to exactly the
109 same fragment of an aggregate (i.e. those that have exactly the same offset
110 and size). Such representatives for a single aggregate, once determined,
111 are linked in a linked list and have the group fields set.
113 Moreover, when doing intraprocedural SRA, a tree is built from those
114 representatives (by the means of first_child and next_sibling pointers), in
115 which all items in a subtree are "within" the root, i.e. their offset is
116 greater or equal to offset of the root and offset+size is smaller or equal
117 to offset+size of the root. Children of an access are sorted by offset.
119 Note that accesses to parts of vector and complex number types are always
120 represented by an access to the whole vector or complex number. It is the
121 duty of the modifying functions to replace them appropriately. */
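/* A sketch of the resulting representation (the type and variable below are
   invented for this comment only, and sizes assume a 32-bit int): for

       struct inner { int x; int y; };
       struct outer { struct inner i; int z; } v;

   with accesses to v.i, v.i.x and v.z, the group representatives would form

       v.i      offset 0,  size 64
         v.i.x  offset 0,  size 32   (first_child of v.i)
       v.z      offset 64, size 32   (a second tree, reached via next_grp)

   because v.i.x lies entirely within v.i, while v.z does not overlap it. */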
125 /* Values returned by `get_ref_base_and_extent' for each component reference.
126 If EXPR isn't a component reference just set `BASE = EXPR', `OFFSET = 0',
127 `SIZE = TREE_SIZE (TREE_TYPE (expr))'. */
128 HOST_WIDE_INT offset;
132 /* Expression. It is context dependent so do not use it to create new
133 expressions to access the original aggregate. See PR 42154 for a
139 /* The statement this access belongs to. */
142 /* Next group representative for this aggregate. */
143 struct access *next_grp;
145 /* Pointer to the group representative. Pointer to itself if the struct is
146 the representative. */
147 struct access *group_representative;
149 /* If this access has any children (in terms of the definition above), this
150 points to the first one. */
151 struct access *first_child;
153 /* In intraprocedural SRA, pointer to the next sibling in the access tree as
154 described above. In IPA-SRA this is a pointer to the next access
155 belonging to the same group (having the same representative). */
156 struct access *next_sibling;
158 /* Pointers to the first and last element in the linked list of assign
160 struct assign_link *first_link, *last_link;
162 /* Pointer to the next access in the work queue. */
163 struct access *next_queued;
165 /* Replacement variable for this access "region." Never to be accessed
166 directly, always only by the means of get_access_replacement() and only
167 when grp_to_be_replaced flag is set. */
168 tree replacement_decl;
170 /* Is this particular access a write access? */
173 /* Is this access an access to a non-addressable field? */
174 unsigned non_addressable : 1;
176 /* Is this access currently in the work queue? */
177 unsigned grp_queued : 1;
179 /* Does this group contain a write access? This flag is propagated down the
181 unsigned grp_write : 1;
183 /* Does this group contain a read access? This flag is propagated down the
185 unsigned grp_read : 1;
187 /* Does this group contain a read access that comes from an assignment
188 statement? This flag is propagated down the access tree. */
189 unsigned grp_assignment_read : 1;
191 /* Does this group contain a write access that comes from an assignment
192 statement? This flag is propagated down the access tree. */
193 unsigned grp_assignment_write : 1;
195 /* Does this group contain a read access through a scalar type? This flag is
196 not propagated in the access tree in any direction. */
197 unsigned grp_scalar_read : 1;
199 /* Does this group contain a write access through a scalar type? This flag
200 is not propagated in the access tree in any direction. */
201 unsigned grp_scalar_write : 1;
203 /* Is this access an artificial one created to scalarize some record
205 unsigned grp_total_scalarization : 1;
207 /* Other passes of the analysis use this bit to make function
208 analyze_access_subtree create scalar replacements for this group if
210 unsigned grp_hint : 1;
212 /* Is the subtree rooted in this access fully covered by scalar
214 unsigned grp_covered : 1;
216 /* If set to true, this access and all below it in an access tree must not be
218 unsigned grp_unscalarizable_region : 1;
220 /* Whether data have been written to parts of the aggregate covered by this
221 access which are not to be scalarized. This flag is propagated up in the
223 unsigned grp_unscalarized_data : 1;
225 /* Does this access and/or group contain a write access through a
227 unsigned grp_partial_lhs : 1;
229 /* Set when a scalar replacement should be created for this variable. We do
230 the decision and creation at different places because create_tmp_var
231 cannot be called from within FOR_EACH_REFERENCED_VAR. */
232 unsigned grp_to_be_replaced : 1;
234 /* Should TREE_NO_WARNING of a replacement be set? */
235 unsigned grp_no_warning : 1;
237 /* Is it possible that the group refers to data which might be (directly or
238 otherwise) modified? */
239 unsigned grp_maybe_modified : 1;
241 /* Set when this is a representative of a pointer to scalar (i.e. by
242 reference) parameter which we consider for turning into a plain scalar
243 (i.e. a by value parameter). */
244 unsigned grp_scalar_ptr : 1;
246 /* Set when we discover that this pointer is not safe to dereference in the
248 unsigned grp_not_necessarilly_dereferenced : 1;
251 typedef struct access *access_p;
253 DEF_VEC_P (access_p);
254 DEF_VEC_ALLOC_P (access_p, heap);
256 /* Alloc pool for allocating access structures. */
257 static alloc_pool access_pool;
259 /* A structure linking lhs and rhs accesses from an aggregate assignment. They
260 are used to propagate subaccesses from rhs to lhs as long as they don't
261 conflict with what is already there. */
264 struct access *lacc, *racc;
265 struct assign_link *next;
268 /* Alloc pool for allocating assign link structures. */
269 static alloc_pool link_pool;
271 /* Base (tree) -> Vector (VEC(access_p,heap) *) map. */
272 static struct pointer_map_t *base_access_vec;
274 /* Bitmap of candidates. */
275 static bitmap candidate_bitmap;
277 /* Bitmap of candidates which we should try to entirely scalarize away and
278 those which cannot be (because they are used, and need to be used, as a whole). */
279 static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
281 /* Obstack for creation of fancy names. */
282 static struct obstack name_obstack;
284 /* Head of a linked list of accesses that need to have their subaccesses
285 propagated to their assignment counterparts. */
286 static struct access *work_queue_head;
288 /* Number of parameters of the analyzed function when doing early ipa SRA. */
289 static int func_param_count;
291 /* scan_function sets the following to true if it encounters a call to
292 __builtin_apply_args. */
293 static bool encountered_apply_args;
295 /* Set by scan_function when it finds a recursive call. */
296 static bool encountered_recursive_call;
298 /* Set by scan_function when it finds a recursive call with fewer actual
299 arguments than formal parameters. */
300 static bool encountered_unchangable_recursive_call;
302 /* This is a table in which for each basic block and parameter there is a
303 distance (offset + size) in that parameter which is dereferenced and
304 accessed in that BB. */
305 static HOST_WIDE_INT *bb_dereferences;
306 /* Bitmap of BBs that can cause the function to "stop" progressing by
307 returning, throwing externally, looping infinitely or calling a function
308 which might abort, etc. */
309 static bitmap final_bbs;
311 /* Representative of no accesses at all. */
312 static struct access no_accesses_representant;
314 /* Predicate to test the special value. */
317 no_accesses_p (struct access *access)
319 return access == &no_accesses_representant;
322 /* Dump contents of ACCESS to file F in a human friendly way. If GRP is true,
323 representative fields are dumped, otherwise those which only describe the
324 individual access are. */
328 /* Number of processed aggregates is readily available in
329 analyze_all_variable_accesses and so is not stored here. */
331 /* Number of created scalar replacements. */
334 /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
338 /* Number of statements created by generate_subtree_copies. */
341 /* Number of statements created by load_assign_lhs_subreplacements. */
344 /* Number of times sra_modify_assign has deleted a statement. */
347 /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
348 RHS separately due to type conversions or nonexistent matching
350 int separate_lhs_rhs_handling;
352 /* Number of parameters that were removed because they were unused. */
353 int deleted_unused_parameters;
355 /* Number of scalars passed as parameters by reference that have been
356 converted to be passed by value. */
357 int scalar_by_ref_to_by_val;
359 /* Number of aggregate parameters that were replaced by one or more of their
361 int aggregate_params_reduced;
363 /* Number of components created when splitting aggregate parameters. */
364 int param_reductions_created;
368 dump_access (FILE *f, struct access *access, bool grp)
370 fprintf (f, "access { ");
371 fprintf (f, "base = (%d)'", DECL_UID (access->base));
372 print_generic_expr (f, access->base, 0);
373 fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
374 fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
375 fprintf (f, ", expr = ");
376 print_generic_expr (f, access->expr, 0);
377 fprintf (f, ", type = ");
378 print_generic_expr (f, access->type, 0);
380 fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
381 "grp_assignment_write = %d, grp_scalar_read = %d, "
382 "grp_scalar_write = %d, grp_total_scalarization = %d, "
383 "grp_hint = %d, grp_covered = %d, "
384 "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
385 "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
386 "grp_maybe_modified = %d, "
387 "grp_not_necessarilly_dereferenced = %d\n",
388 access->grp_read, access->grp_write, access->grp_assignment_read,
389 access->grp_assignment_write, access->grp_scalar_read,
390 access->grp_scalar_write, access->grp_total_scalarization,
391 access->grp_hint, access->grp_covered,
392 access->grp_unscalarizable_region, access->grp_unscalarized_data,
393 access->grp_partial_lhs, access->grp_to_be_replaced,
394 access->grp_maybe_modified,
395 access->grp_not_necessarilly_dereferenced);
397 fprintf (f, ", write = %d, grp_total_scalarization = %d, "
398 "grp_partial_lhs = %d\n",
399 access->write, access->grp_total_scalarization,
400 access->grp_partial_lhs);
403 /* Dump a subtree rooted in ACCESS to file F, indent by LEVEL. */
406 dump_access_tree_1 (FILE *f, struct access *access, int level)
412 for (i = 0; i < level; i++)
413 fputs ("* ", dump_file);
415 dump_access (f, access, true);
417 if (access->first_child)
418 dump_access_tree_1 (f, access->first_child, level + 1);
420 access = access->next_sibling;
425 /* Dump all access trees for a variable, given the pointer to the first root in
429 dump_access_tree (FILE *f, struct access *access)
431 for (; access; access = access->next_grp)
432 dump_access_tree_1 (f, access, 0);
435 /* Return true iff ACC is non-NULL and has subaccesses. */
438 access_has_children_p (struct access *acc)
440 return acc && acc->first_child;
443 /* Return a vector of pointers to accesses for the variable given in BASE or
444 NULL if there is none. */
446 static VEC (access_p, heap) *
447 get_base_access_vector (tree base)
451 slot = pointer_map_contains (base_access_vec, base);
455 return *(VEC (access_p, heap) **) slot;
458 /* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
459 in ACCESS. Return NULL if it cannot be found. */
461 static struct access *
462 find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
465 while (access && (access->offset != offset || access->size != size))
467 struct access *child = access->first_child;
469 while (child && (child->offset + child->size <= offset))
470 child = child->next_sibling;
477 /* Return the first group representative for DECL or NULL if none exists. */
479 static struct access *
480 get_first_repr_for_decl (tree base)
482 VEC (access_p, heap) *access_vec;
484 access_vec = get_base_access_vector (base);
488 return VEC_index (access_p, access_vec, 0);
491 /* Find an access representative for the variable BASE and given OFFSET and
492 SIZE. Requires that access trees have already been built. Return NULL if
493 it cannot be found. */
495 static struct access *
496 get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
499 struct access *access;
501 access = get_first_repr_for_decl (base);
502 while (access && (access->offset + access->size <= offset))
503 access = access->next_grp;
507 return find_access_in_subtree (access, offset, size);
510 /* Add LINK to the linked list of assign links of RACC. */
512 add_link_to_rhs (struct access *racc, struct assign_link *link)
514 gcc_assert (link->racc == racc);
516 if (!racc->first_link)
518 gcc_assert (!racc->last_link);
519 racc->first_link = link;
522 racc->last_link->next = link;
524 racc->last_link = link;
528 /* Move all link structures in their linked list in OLD_RACC to the linked list
531 relink_to_new_repr (struct access *new_racc, struct access *old_racc)
533 if (!old_racc->first_link)
535 gcc_assert (!old_racc->last_link);
539 if (new_racc->first_link)
541 gcc_assert (!new_racc->last_link->next);
542 gcc_assert (!old_racc->last_link || !old_racc->last_link->next);
544 new_racc->last_link->next = old_racc->first_link;
545 new_racc->last_link = old_racc->last_link;
549 gcc_assert (!new_racc->last_link);
551 new_racc->first_link = old_racc->first_link;
552 new_racc->last_link = old_racc->last_link;
554 old_racc->first_link = old_racc->last_link = NULL;
557 /* Add ACCESS to the work queue (which is actually a stack). */
560 add_access_to_work_queue (struct access *access)
562 if (!access->grp_queued)
564 gcc_assert (!access->next_queued);
565 access->next_queued = work_queue_head;
566 access->grp_queued = 1;
567 work_queue_head = access;
571 /* Pop an access from the work queue, and return it, assuming there is one. */
573 static struct access *
574 pop_access_from_work_queue (void)
576 struct access *access = work_queue_head;
578 work_queue_head = access->next_queued;
579 access->next_queued = NULL;
580 access->grp_queued = 0;
585 /* Allocate necessary structures. */
588 sra_initialize (void)
590 candidate_bitmap = BITMAP_ALLOC (NULL);
591 should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
592 cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
593 gcc_obstack_init (&name_obstack);
594 access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
595 link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
596 base_access_vec = pointer_map_create ();
597 memset (&sra_stats, 0, sizeof (sra_stats));
598 encountered_apply_args = false;
599 encountered_recursive_call = false;
600 encountered_unchangable_recursive_call = false;
603 /* Hook fed to pointer_map_traverse, deallocate stored vectors. */
606 delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
607 void *data ATTRIBUTE_UNUSED)
609 VEC (access_p, heap) *access_vec;
610 access_vec = (VEC (access_p, heap) *) *value;
611 VEC_free (access_p, heap, access_vec);
616 /* Deallocate all general structures. */
619 sra_deinitialize (void)
621 BITMAP_FREE (candidate_bitmap);
622 BITMAP_FREE (should_scalarize_away_bitmap);
623 BITMAP_FREE (cannot_scalarize_away_bitmap);
624 free_alloc_pool (access_pool);
625 free_alloc_pool (link_pool);
626 obstack_free (&name_obstack, NULL);
628 pointer_map_traverse (base_access_vec, delete_base_accesses, NULL);
629 pointer_map_destroy (base_access_vec);
632 /* Remove DECL from candidates for SRA and write REASON to the dump file if
635 disqualify_candidate (tree decl, const char *reason)
637 bitmap_clear_bit (candidate_bitmap, DECL_UID (decl));
639 if (dump_file && (dump_flags & TDF_DETAILS))
641 fprintf (dump_file, "! Disqualifying ");
642 print_generic_expr (dump_file, decl, 0);
643 fprintf (dump_file, " - %s\n", reason);
647 /* Return true iff the type contains a field or an element which does not allow
651 type_internals_preclude_sra_p (tree type, const char **msg)
656 switch (TREE_CODE (type))
660 case QUAL_UNION_TYPE:
661 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
662 if (TREE_CODE (fld) == FIELD_DECL)
664 tree ft = TREE_TYPE (fld);
666 if (TREE_THIS_VOLATILE (fld))
668 *msg = "volatile structure field";
671 if (!DECL_FIELD_OFFSET (fld))
673 *msg = "no structure field offset";
676 if (!DECL_SIZE (fld))
678 *msg = "zero structure field size";
681 if (!host_integerp (DECL_FIELD_OFFSET (fld), 1))
683 *msg = "structure field offset not fixed";
686 if (!host_integerp (DECL_SIZE (fld), 1))
688 *msg = "structure field size not fixed";
691 if (AGGREGATE_TYPE_P (ft)
692 && int_bit_position (fld) % BITS_PER_UNIT != 0)
694 *msg = "structure field is bit field";
698 if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
705 et = TREE_TYPE (type);
707 if (TYPE_VOLATILE (et))
709 *msg = "element type is volatile";
713 if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
723 /* If T is an SSA_NAME, return NULL if it is not a default def or return its
724 base variable if it is. Return T if it is not an SSA_NAME. */
727 get_ssa_base_param (tree t)
729 if (TREE_CODE (t) == SSA_NAME)
731 if (SSA_NAME_IS_DEFAULT_DEF (t))
732 return SSA_NAME_VAR (t);
739 /* Mark a dereference of BASE of distance DIST in a basic block that STMT
740 belongs to, unless the BB has already been marked as a potentially
744 mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
746 basic_block bb = gimple_bb (stmt);
747 int idx, parm_index = 0;
750 if (bitmap_bit_p (final_bbs, bb->index))
753 for (parm = DECL_ARGUMENTS (current_function_decl);
754 parm && parm != base;
755 parm = DECL_CHAIN (parm))
758 gcc_assert (parm_index < func_param_count);
760 idx = bb->index * func_param_count + parm_index;
761 if (bb_dereferences[idx] < dist)
762 bb_dereferences[idx] = dist;
765 /* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
766 the three fields. Also add it to the vector of accesses corresponding to
767 the base. Finally, return the new access. */
769 static struct access *
770 create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
772 VEC (access_p, heap) *vec;
773 struct access *access;
776 access = (struct access *) pool_alloc (access_pool);
777 memset (access, 0, sizeof (struct access));
779 access->offset = offset;
782 slot = pointer_map_contains (base_access_vec, base);
784 vec = (VEC (access_p, heap) *) *slot;
786 vec = VEC_alloc (access_p, heap, 32);
788 VEC_safe_push (access_p, heap, vec, access);
790 *((struct VEC (access_p,heap) **)
791 pointer_map_insert (base_access_vec, base)) = vec;
796 /* Create and insert access for EXPR. Return created access, or NULL if it is
799 static struct access *
800 create_access (tree expr, gimple stmt, bool write)
802 struct access *access;
803 HOST_WIDE_INT offset, size, max_size;
805 bool ptr, unscalarizable_region = false;
807 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
809 if (sra_mode == SRA_MODE_EARLY_IPA
810 && TREE_CODE (base) == MEM_REF)
812 base = get_ssa_base_param (TREE_OPERAND (base, 0));
820 if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
823 if (sra_mode == SRA_MODE_EARLY_IPA)
825 if (size < 0 || size != max_size)
827 disqualify_candidate (base, "Encountered a variable sized access.");
830 if (TREE_CODE (expr) == COMPONENT_REF
831 && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
833 disqualify_candidate (base, "Encountered a bit-field access.");
836 gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);
839 mark_parm_dereference (base, offset + size, stmt);
843 if (size != max_size)
846 unscalarizable_region = true;
850 disqualify_candidate (base, "Encountered an unconstrained access.");
855 access = create_access_1 (base, offset, size);
857 access->type = TREE_TYPE (expr);
858 access->write = write;
859 access->grp_unscalarizable_region = unscalarizable_region;
862 if (TREE_CODE (expr) == COMPONENT_REF
863 && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
864 access->non_addressable = 1;
870 /* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
871 register types or (recursively) records with only these two kinds of fields.
872 It also returns false if any of these records contains a bit-field. */
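/* For instance (types invented for this comment), a record such as

       struct point { int x; int y; };
       struct box { struct point min; struct point max; };

   satisfies the predicate, whereas a record containing a bit-field or an
   array member, e.g.

       struct bad { int flags : 3; int data[4]; };

   does not, because an array is neither a gimple register type nor a
   record. */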
875 type_consists_of_records_p (tree type)
879 if (TREE_CODE (type) != RECORD_TYPE)
882 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
883 if (TREE_CODE (fld) == FIELD_DECL)
885 tree ft = TREE_TYPE (fld);
887 if (DECL_BIT_FIELD (fld))
890 if (!is_gimple_reg_type (ft)
891 && !type_consists_of_records_p (ft))
898 /* Create total_scalarization accesses for all scalar type fields in DECL that
899 must be of a RECORD_TYPE conforming to type_consists_of_records_p. BASE
900 must be the top-most VAR_DECL representing the variable, OFFSET must be the
901 offset of DECL within BASE. REF must be the memory reference expression for
905 completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
908 tree fld, decl_type = TREE_TYPE (decl);
910 for (fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
911 if (TREE_CODE (fld) == FIELD_DECL)
913 HOST_WIDE_INT pos = offset + int_bit_position (fld);
914 tree ft = TREE_TYPE (fld);
915 tree nref = build3 (COMPONENT_REF, TREE_TYPE (fld), ref, fld,
918 if (is_gimple_reg_type (ft))
920 struct access *access;
923 size = tree_low_cst (DECL_SIZE (fld), 1);
924 access = create_access_1 (base, pos, size);
927 access->grp_total_scalarization = 1;
928 /* Accesses for intraprocedural SRA can have their stmt NULL. */
931 completely_scalarize_record (base, fld, pos, nref);
935 /* Create total_scalarization accesses for all scalar type fields in VAR and
936 for VAR as a whole. VAR must be of a RECORD_TYPE conforming to
937 type_consists_of_records_p. */
940 completely_scalarize_var (tree var)
942 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (var), 1);
943 struct access *access;
945 access = create_access_1 (var, 0, size);
947 access->type = TREE_TYPE (var);
948 access->grp_total_scalarization = 1;
950 completely_scalarize_record (var, var, 0, var);
953 /* Search the given tree for a declaration by skipping handled components and
954 exclude it from the candidates. */
957 disqualify_base_of_expr (tree t, const char *reason)
959 t = get_base_address (t);
960 if (sra_mode == SRA_MODE_EARLY_IPA
961 && TREE_CODE (t) == MEM_REF)
962 t = get_ssa_base_param (TREE_OPERAND (t, 0));
965 disqualify_candidate (t, reason);
968 /* Scan expression EXPR and create access structures for all accesses to
969 candidates for scalarization. Return the created access or NULL if none is
972 static struct access *
973 build_access_from_expr_1 (tree expr, gimple stmt, bool write)
975 struct access *ret = NULL;
978 if (TREE_CODE (expr) == BIT_FIELD_REF
979 || TREE_CODE (expr) == IMAGPART_EXPR
980 || TREE_CODE (expr) == REALPART_EXPR)
982 expr = TREE_OPERAND (expr, 0);
988 /* We need to dive through V_C_Es in order to get the size of its parameter
989 and not the result type. Ada produces such statements. We are also
990 capable of handling the topmost V_C_E but not any of those buried in other
991 handled components. */
992 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
993 expr = TREE_OPERAND (expr, 0);
995 if (contains_view_convert_expr_p (expr))
997 disqualify_base_of_expr (expr, "V_C_E under a different handled "
1002 switch (TREE_CODE (expr))
1005 if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
1006 && sra_mode != SRA_MODE_EARLY_IPA)
1014 case ARRAY_RANGE_REF:
1015 ret = create_access (expr, stmt, write);
1022 if (write && partial_ref && ret)
1023 ret->grp_partial_lhs = 1;
1028 /* Scan expression EXPR and create access structures for all accesses to
1029 candidates for scalarization. Return true if any access has been inserted.
1030 STMT must be the statement from which the expression is taken, WRITE must be
1031 true if the expression is a store and false otherwise. */
1034 build_access_from_expr (tree expr, gimple stmt, bool write)
1036 struct access *access;
1038 access = build_access_from_expr_1 (expr, stmt, write);
1041 /* This means the aggregate is accessed as a whole in a way other than an
1042 assign statement and thus cannot be removed even if we had a scalar
1043 replacement for everything. */
1044 if (cannot_scalarize_away_bitmap)
1045 bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
1051 /* Disqualify LHS and RHS for scalarization if STMT must end its basic block in
1052 modes in which it matters; return true iff they have been disqualified. RHS
1053 may be NULL, in which case it is ignored. If we scalarize an aggregate in
1054 intra-SRA we may need to add statements after each statement. This is not
1055 possible if a statement unconditionally has to end the basic block. */
1057 disqualify_ops_if_throwing_stmt (gimple stmt, tree lhs, tree rhs)
1059 if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1060 && (stmt_can_throw_internal (stmt) || stmt_ends_bb_p (stmt)))
1062 disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
1064 disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
1070 /* Return true iff type of EXP is not sufficiently aligned. */
1073 tree_non_mode_aligned_mem_p (tree exp)
1075 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
1078 if (TREE_CODE (exp) == SSA_NAME
1079 || TREE_CODE (exp) == MEM_REF
1081 || !STRICT_ALIGNMENT)
1084 align = get_object_alignment (exp, BIGGEST_ALIGNMENT);
1085 if (GET_MODE_ALIGNMENT (mode) > align)
1091 /* Scan expressions occurring in STMT, create access structures for all accesses
1092 to candidates for scalarization and remove those candidates which occur in
1093 statements or expressions that prevent them from being split apart. Return
1094 true if any access has been inserted. */
1097 build_accesses_from_assign (gimple stmt)
1100 struct access *lacc, *racc;
1102 if (!gimple_assign_single_p (stmt))
1105 lhs = gimple_assign_lhs (stmt);
1106 rhs = gimple_assign_rhs1 (stmt);
1108 if (disqualify_ops_if_throwing_stmt (stmt, lhs, rhs))
1111 racc = build_access_from_expr_1 (rhs, stmt, false);
1112 lacc = build_access_from_expr_1 (lhs, stmt, true);
1116 lacc->grp_assignment_write = 1;
1117 lacc->grp_unscalarizable_region |= tree_non_mode_aligned_mem_p (rhs);
1122 racc->grp_assignment_read = 1;
1123 if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
1124 && !is_gimple_reg_type (racc->type))
1125 bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
1126 racc->grp_unscalarizable_region |= tree_non_mode_aligned_mem_p (lhs);
1130 && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1131 && !lacc->grp_unscalarizable_region
1132 && !racc->grp_unscalarizable_region
1133 && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
1134 /* FIXME: Turn the following line into an assert after PR 40058 is
1136 && lacc->size == racc->size
1137 && useless_type_conversion_p (lacc->type, racc->type))
1139 struct assign_link *link;
1141 link = (struct assign_link *) pool_alloc (link_pool);
1142 memset (link, 0, sizeof (struct assign_link));
1147 add_link_to_rhs (racc, link);
1150 return lacc || racc;
1153 /* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
1154 GIMPLE_ASM operands with memory constraints which cannot be scalarized. */
1157 asm_visit_addr (gimple stmt ATTRIBUTE_UNUSED, tree op,
1158 void *data ATTRIBUTE_UNUSED)
1160 op = get_base_address (op);
1163 disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
1168 /* Return true iff callsite CALL has at least as many actual arguments as there
1169 are formal parameters of the function currently processed by IPA-SRA. */
1172 callsite_has_enough_arguments_p (gimple call)
1174 return gimple_call_num_args (call) >= (unsigned) func_param_count;
1177 /* Scan function and look for interesting expressions and create access
1178 structures for them. Return true iff any access is created. */
1181 scan_function (void)
1188 gimple_stmt_iterator gsi;
1189 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1191 gimple stmt = gsi_stmt (gsi);
1195 if (final_bbs && stmt_can_throw_external (stmt))
1196 bitmap_set_bit (final_bbs, bb->index);
1197 switch (gimple_code (stmt))
1200 t = gimple_return_retval (stmt);
1202 ret |= build_access_from_expr (t, stmt, false);
1204 bitmap_set_bit (final_bbs, bb->index);
1208 ret |= build_accesses_from_assign (stmt);
1212 for (i = 0; i < gimple_call_num_args (stmt); i++)
1213 ret |= build_access_from_expr (gimple_call_arg (stmt, i),
1216 if (sra_mode == SRA_MODE_EARLY_IPA)
1218 tree dest = gimple_call_fndecl (stmt);
1219 int flags = gimple_call_flags (stmt);
1223 if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
1224 && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
1225 encountered_apply_args = true;
1226 if (cgraph_get_node (dest)
1227 == cgraph_get_node (current_function_decl))
1229 encountered_recursive_call = true;
1230 if (!callsite_has_enough_arguments_p (stmt))
1231 encountered_unchangable_recursive_call = true;
1236 && (flags & (ECF_CONST | ECF_PURE)) == 0)
1237 bitmap_set_bit (final_bbs, bb->index);
1240 t = gimple_call_lhs (stmt);
1241 if (t && !disqualify_ops_if_throwing_stmt (stmt, t, NULL))
1242 ret |= build_access_from_expr (t, stmt, true);
1246 walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
1249 bitmap_set_bit (final_bbs, bb->index);
1251 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
1253 t = TREE_VALUE (gimple_asm_input_op (stmt, i));
1254 ret |= build_access_from_expr (t, stmt, false);
1256 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
1258 t = TREE_VALUE (gimple_asm_output_op (stmt, i));
1259 ret |= build_access_from_expr (t, stmt, true);
1272 /* Helper of QSORT function. There are pointers to accesses in the array. An
1273 access is considered smaller than another if it has smaller offset or if the
1274 offsets are the same but its size is bigger. */
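/* For example, representatives with (offset, size) of (0, 32), (0, 64) and
   (32, 32) end up ordered as (0, 64), (0, 32), (32, 32): smaller offsets
   first and, for equal offsets, the larger (enclosing) access first. The
   type-based rules below only break ties between accesses of equal offset
   and size. */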
1277 compare_access_positions (const void *a, const void *b)
1279 const access_p *fp1 = (const access_p *) a;
1280 const access_p *fp2 = (const access_p *) b;
1281 const access_p f1 = *fp1;
1282 const access_p f2 = *fp2;
1284 if (f1->offset != f2->offset)
1285 return f1->offset < f2->offset ? -1 : 1;
1287 if (f1->size == f2->size)
1289 if (f1->type == f2->type)
1291 /* Put any non-aggregate type before any aggregate type. */
1292 else if (!is_gimple_reg_type (f1->type)
1293 && is_gimple_reg_type (f2->type))
1295 else if (is_gimple_reg_type (f1->type)
1296 && !is_gimple_reg_type (f2->type))
1298 /* Put any complex or vector type before any other scalar type. */
1299 else if (TREE_CODE (f1->type) != COMPLEX_TYPE
1300 && TREE_CODE (f1->type) != VECTOR_TYPE
1301 && (TREE_CODE (f2->type) == COMPLEX_TYPE
1302 || TREE_CODE (f2->type) == VECTOR_TYPE))
1304 else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
1305 || TREE_CODE (f1->type) == VECTOR_TYPE)
1306 && TREE_CODE (f2->type) != COMPLEX_TYPE
1307 && TREE_CODE (f2->type) != VECTOR_TYPE)
1309 /* Put the integral type with the bigger precision first. */
1310 else if (INTEGRAL_TYPE_P (f1->type)
1311 && INTEGRAL_TYPE_P (f2->type))
1312 return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
1313 /* Put any integral type with non-full precision last. */
1314 else if (INTEGRAL_TYPE_P (f1->type)
1315 && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
1316 != TYPE_PRECISION (f1->type)))
1318 else if (INTEGRAL_TYPE_P (f2->type)
1319 && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
1320 != TYPE_PRECISION (f2->type)))
1322 /* Stabilize the sort. */
1323 return TYPE_UID (f1->type) - TYPE_UID (f2->type);
1326 /* We want the bigger accesses first, thus the opposite operator in the next
1328 return f1->size > f2->size ? -1 : 1;
1332 /* Append a name of the declaration to the name obstack. A helper function for
1336 make_fancy_decl_name (tree decl)
1340 tree name = DECL_NAME (decl);
1342 obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
1343 IDENTIFIER_LENGTH (name));
1346 sprintf (buffer, "D%u", DECL_UID (decl));
1347 obstack_grow (&name_obstack, buffer, strlen (buffer));
1351 /* Helper for make_fancy_name. */
1354 make_fancy_name_1 (tree expr)
1361 make_fancy_decl_name (expr);
1365 switch (TREE_CODE (expr))
1368 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1369 obstack_1grow (&name_obstack, '$');
1370 make_fancy_decl_name (TREE_OPERAND (expr, 1));
1374 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1375 obstack_1grow (&name_obstack, '$');
1376 /* Arrays with only one element may not have a constant as their
1378 index = TREE_OPERAND (expr, 1);
1379 if (TREE_CODE (index) != INTEGER_CST)
1381 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
1382 obstack_grow (&name_obstack, buffer, strlen (buffer));
1386 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1390 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1391 if (!integer_zerop (TREE_OPERAND (expr, 1)))
1393 obstack_1grow (&name_obstack, '$');
1394 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
1395 TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
1396 obstack_grow (&name_obstack, buffer, strlen (buffer));
1403 gcc_unreachable (); /* we treat these as scalars. */
1410 /* Create a human readable name for replacement variable of ACCESS. */
1413 make_fancy_name (tree expr)
1415 make_fancy_name_1 (expr);
1416 obstack_1grow (&name_obstack, '\0');
1417 return XOBFINISH (&name_obstack, char *);
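/* For instance (a made-up declaration, used only for illustration), a
   replacement for parm.data[2].len would be named "parm$data$2$len";
   declarations that have no DECL_NAME contribute "D<uid>" instead. */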
1420 /* Construct a MEM_REF that would reference a part of aggregate BASE of type
1421 EXP_TYPE at the given OFFSET. If BASE is something for which
1422 get_addr_base_and_unit_offset returns NULL, gsi must be non-NULL and is used
1423 to insert new statements either before or below the current one as specified
1424 by INSERT_AFTER. This function is not capable of handling bitfields. */
1427 build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
1428 tree exp_type, gimple_stmt_iterator *gsi,
1431 tree prev_base = base;
1433 HOST_WIDE_INT base_offset;
1435 gcc_checking_assert (offset % BITS_PER_UNIT == 0);
1437 base = get_addr_base_and_unit_offset (base, &base_offset);
1439 /* get_addr_base_and_unit_offset returns NULL for references with a variable
1440 offset such as array[var_index]. */
1446 gcc_checking_assert (gsi);
1447 tmp = create_tmp_reg (build_pointer_type (TREE_TYPE (prev_base)), NULL);
1448 add_referenced_var (tmp);
1449 tmp = make_ssa_name (tmp, NULL);
1450 addr = build_fold_addr_expr (unshare_expr (prev_base));
1451 STRIP_USELESS_TYPE_CONVERSION (addr);
1452 stmt = gimple_build_assign (tmp, addr);
1453 gimple_set_location (stmt, loc);
1454 SSA_NAME_DEF_STMT (tmp) = stmt;
1456 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1458 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1461 off = build_int_cst (reference_alias_ptr_type (prev_base),
1462 offset / BITS_PER_UNIT);
1465 else if (TREE_CODE (base) == MEM_REF)
1467 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1468 base_offset + offset / BITS_PER_UNIT);
1469 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1470 base = unshare_expr (TREE_OPERAND (base, 0));
1474 off = build_int_cst (reference_alias_ptr_type (base),
1475 base_offset + offset / BITS_PER_UNIT);
1476 base = build_fold_addr_expr (unshare_expr (base));
1479 return fold_build2_loc (loc, MEM_REF, exp_type, base, off);
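/* As an illustration (assuming 8-bit units and a made-up variable s of an
   aggregate type), calling the function above with BASE being s, OFFSET 64
   and EXP_TYPE int yields a MEM_REF of type int whose address operand is &s
   and whose constant offset operand is 8. When BASE is itself a MEM_REF,
   the extra offset is instead folded into its existing offset operand
   rather than taking a new address. */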
1482 /* Construct a memory reference to a part of an aggregate BASE at the given
1483 OFFSET and of the same type as MODEL. In case this is a reference to a
1484 component, the function will replicate the last COMPONENT_REF of model's
1485 expr to access it. GSI and INSERT_AFTER have the same meaning as in
1486 build_ref_for_offset. */
1489 build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1490 struct access *model, gimple_stmt_iterator *gsi,
1493 if (TREE_CODE (model->expr) == COMPONENT_REF)
1495 tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);
1496 tree cr_offset = component_ref_field_offset (model->expr);
1498 gcc_assert (cr_offset && host_integerp (cr_offset, 1));
1499 offset -= TREE_INT_CST_LOW (cr_offset) * BITS_PER_UNIT;
1500 offset -= TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fld));
1501 exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
1502 t = build_ref_for_offset (loc, base, offset, exp_type, gsi, insert_after);
1503 return fold_build3_loc (loc, COMPONENT_REF, model->type, t, fld,
1504 TREE_OPERAND (model->expr, 2));
1507 return build_ref_for_offset (loc, base, offset, model->type,
1511 /* Construct a memory reference consisting of component_refs and array_refs to
1512 a part of an aggregate *RES (which is of type TYPE). The requested part
1513 should have type EXP_TYPE at the given OFFSET. This function might not
1514 succeed; it returns true when it does, and only then does *RES point to something
1515 meaningful. This function should be used only to build expressions that we
1516 might need to present to user (e.g. in warnings). In all other situations,
1517 build_ref_for_model or build_ref_for_offset should be used instead. */
1520 build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
1526 tree tr_size, index, minidx;
1527 HOST_WIDE_INT el_size;
1529 if (offset == 0 && exp_type
1530 && types_compatible_p (exp_type, type))
1533 switch (TREE_CODE (type))
1536 case QUAL_UNION_TYPE:
1538 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1540 HOST_WIDE_INT pos, size;
1541 tree expr, *expr_ptr;
1543 if (TREE_CODE (fld) != FIELD_DECL)
1546 pos = int_bit_position (fld);
1547 gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1548 tr_size = DECL_SIZE (fld);
1549 if (!tr_size || !host_integerp (tr_size, 1))
1551 size = tree_low_cst (tr_size, 1);
1557 else if (pos > offset || (pos + size) <= offset)
1560 expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1563 if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
1564 offset - pos, exp_type))
1573 tr_size = TYPE_SIZE (TREE_TYPE (type));
1574 if (!tr_size || !host_integerp (tr_size, 1))
1576 el_size = tree_low_cst (tr_size, 1);
1578 minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1579 if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
1581 index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
1582 if (!integer_zerop (minidx))
1583 index = int_const_binop (PLUS_EXPR, index, minidx);
1584 *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
1585 NULL_TREE, NULL_TREE);
1586 offset = offset % el_size;
1587 type = TREE_TYPE (type);
1602 /* Return true iff TYPE is stdarg va_list type. */
1605 is_va_list_type (tree type)
1607 return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
1610 /* Print a message to the dump file saying why a variable was rejected. */
1613 reject (tree var, const char *msg)
1615 if (dump_file && (dump_flags & TDF_DETAILS))
1617 fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
1618 print_generic_expr (dump_file, var, 0);
1619 fprintf (dump_file, "\n");
1623 /* The very first phase of intraprocedural SRA. It marks in candidate_bitmap
1624 those variables whose type is suitable for scalarization. */
1627 find_var_candidates (void)
1630 referenced_var_iterator rvi;
1634 FOR_EACH_REFERENCED_VAR (cfun, var, rvi)
1636 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (var) != PARM_DECL)
1638 type = TREE_TYPE (var);
1640 if (!AGGREGATE_TYPE_P (type))
1642 reject (var, "not aggregate");
1645 if (needs_to_live_in_memory (var))
1647 reject (var, "needs to live in memory");
1650 if (TREE_THIS_VOLATILE (var))
1652 reject (var, "is volatile");
1655 if (!COMPLETE_TYPE_P (type))
1657 reject (var, "has incomplete type");
1660 if (!host_integerp (TYPE_SIZE (type), 1))
1662 reject (var, "type size not fixed");
1665 if (tree_low_cst (TYPE_SIZE (type), 1) == 0)
1667 reject (var, "type size is zero");
1670 if (type_internals_preclude_sra_p (type, &msg))
1675 if (/* Fix for PR 41089. tree-stdarg.c needs to have va_lists intact but
1676 we also want to schedule it rather late. Thus we ignore it in
1678 (sra_mode == SRA_MODE_EARLY_INTRA
1679 && is_va_list_type (type)))
1681 reject (var, "is va_list");
1685 bitmap_set_bit (candidate_bitmap, DECL_UID (var));
1687 if (dump_file && (dump_flags & TDF_DETAILS))
1689 fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
1690 print_generic_expr (dump_file, var, 0);
1691 fprintf (dump_file, "\n");
1699 /* Sort all accesses for the given variable, check for partial overlaps and
1700 return NULL if there are any. If there are none, pick a representative for
1701 each combination of offset and size and create a linked list out of them.
1702 Return the pointer to the first representative and make sure it is the first
1703 one in the vector of accesses. */
1705 static struct access *
1706 sort_and_splice_var_accesses (tree var)
1708 int i, j, access_count;
1709 struct access *res, **prev_acc_ptr = &res;
1710 VEC (access_p, heap) *access_vec;
1712 HOST_WIDE_INT low = -1, high = 0;
1714 access_vec = get_base_access_vector (var);
1717 access_count = VEC_length (access_p, access_vec);
1719 /* Sort by <OFFSET, SIZE>. */
1720 VEC_qsort (access_p, access_vec, compare_access_positions);
1723 while (i < access_count)
1725 struct access *access = VEC_index (access_p, access_vec, i);
1726 bool grp_write = access->write;
1727 bool grp_read = !access->write;
1728 bool grp_scalar_write = access->write
1729 && is_gimple_reg_type (access->type);
1730 bool grp_scalar_read = !access->write
1731 && is_gimple_reg_type (access->type);
1732 bool grp_assignment_read = access->grp_assignment_read;
1733 bool grp_assignment_write = access->grp_assignment_write;
1734 bool multiple_scalar_reads = false;
1735 bool total_scalarization = access->grp_total_scalarization;
1736 bool grp_partial_lhs = access->grp_partial_lhs;
1737 bool first_scalar = is_gimple_reg_type (access->type);
1738 bool unscalarizable_region = access->grp_unscalarizable_region;
1740 if (first || access->offset >= high)
1743 low = access->offset;
1744 high = access->offset + access->size;
1746 else if (access->offset > low && access->offset + access->size > high)
1749 gcc_assert (access->offset >= low
1750 && access->offset + access->size <= high);
1753 while (j < access_count)
1755 struct access *ac2 = VEC_index (access_p, access_vec, j);
1756 if (ac2->offset != access->offset || ac2->size != access->size)
1761 grp_scalar_write = (grp_scalar_write
1762 || is_gimple_reg_type (ac2->type));
1767 if (is_gimple_reg_type (ac2->type))
1769 if (grp_scalar_read)
1770 multiple_scalar_reads = true;
1772 grp_scalar_read = true;
1775 grp_assignment_read |= ac2->grp_assignment_read;
1776 grp_assignment_write |= ac2->grp_assignment_write;
1777 grp_partial_lhs |= ac2->grp_partial_lhs;
1778 unscalarizable_region |= ac2->grp_unscalarizable_region;
1779 total_scalarization |= ac2->grp_total_scalarization;
1780 relink_to_new_repr (access, ac2);
1782 /* If there are both aggregate-type and scalar-type accesses with
1783 this combination of size and offset, the comparison function
1784 should have put the scalars first. */
1785 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
1786 ac2->group_representative = access;
1792 access->group_representative = access;
1793 access->grp_write = grp_write;
1794 access->grp_read = grp_read;
1795 access->grp_scalar_read = grp_scalar_read;
1796 access->grp_scalar_write = grp_scalar_write;
1797 access->grp_assignment_read = grp_assignment_read;
1798 access->grp_assignment_write = grp_assignment_write;
1799 access->grp_hint = multiple_scalar_reads || total_scalarization;
1800 access->grp_total_scalarization = total_scalarization;
1801 access->grp_partial_lhs = grp_partial_lhs;
1802 access->grp_unscalarizable_region = unscalarizable_region;
1803 if (access->first_link)
1804 add_access_to_work_queue (access);
1806 *prev_acc_ptr = access;
1807 prev_acc_ptr = &access->next_grp;
1810 gcc_assert (res == VEC_index (access_p, access_vec, 0));
1814 /* Create a variable for the given ACCESS which determines the type, name and a
1815 few other properties. Return the variable declaration and store it also to
1816 ACCESS->replacement_decl. */
1819 create_access_replacement (struct access *access, bool rename)
1823 repl = create_tmp_var (access->type, "SR");
1825 add_referenced_var (repl);
1827 mark_sym_for_renaming (repl);
1829 if (!access->grp_partial_lhs
1830 && (TREE_CODE (access->type) == COMPLEX_TYPE
1831 || TREE_CODE (access->type) == VECTOR_TYPE))
1832 DECL_GIMPLE_REG_P (repl) = 1;
1834 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
1835 DECL_ARTIFICIAL (repl) = 1;
1836 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
1838 if (DECL_NAME (access->base)
1839 && !DECL_IGNORED_P (access->base)
1840 && !DECL_ARTIFICIAL (access->base))
1842 char *pretty_name = make_fancy_name (access->expr);
1843 tree debug_expr = unshare_expr (access->expr), d;
1845 DECL_NAME (repl) = get_identifier (pretty_name);
1846 obstack_free (&name_obstack, pretty_name);
1848 /* Get rid of any SSA_NAMEs embedded in debug_expr,
1849 as DECL_DEBUG_EXPR isn't considered when looking for still
1850 used SSA_NAMEs and thus they could be freed. All debug info
1851 generation cares about is whether something is constant or variable
1852 and that get_ref_base_and_extent works properly on the
1854 for (d = debug_expr; handled_component_p (d); d = TREE_OPERAND (d, 0))
1855 switch (TREE_CODE (d))
1858 case ARRAY_RANGE_REF:
1859 if (TREE_OPERAND (d, 1)
1860 && TREE_CODE (TREE_OPERAND (d, 1)) == SSA_NAME)
1861 TREE_OPERAND (d, 1) = SSA_NAME_VAR (TREE_OPERAND (d, 1));
1862 if (TREE_OPERAND (d, 3)
1863 && TREE_CODE (TREE_OPERAND (d, 3)) == SSA_NAME)
1864 TREE_OPERAND (d, 3) = SSA_NAME_VAR (TREE_OPERAND (d, 3));
1867 if (TREE_OPERAND (d, 2)
1868 && TREE_CODE (TREE_OPERAND (d, 2)) == SSA_NAME)
1869 TREE_OPERAND (d, 2) = SSA_NAME_VAR (TREE_OPERAND (d, 2));
1874 SET_DECL_DEBUG_EXPR (repl, debug_expr);
1875 DECL_DEBUG_EXPR_IS_FROM (repl) = 1;
1876 if (access->grp_no_warning)
1877 TREE_NO_WARNING (repl) = 1;
1879 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
1882 TREE_NO_WARNING (repl) = 1;
1886 fprintf (dump_file, "Created a replacement for ");
1887 print_generic_expr (dump_file, access->base, 0);
1888 fprintf (dump_file, " offset: %u, size: %u: ",
1889 (unsigned) access->offset, (unsigned) access->size);
1890 print_generic_expr (dump_file, repl, 0);
1891 fprintf (dump_file, "\n");
1893 sra_stats.replacements++;
1898 /* Return ACCESS scalar replacement, create it if it does not exist yet. */
1901 get_access_replacement (struct access *access)
1903 gcc_assert (access->grp_to_be_replaced);
1905 if (!access->replacement_decl)
1906 access->replacement_decl = create_access_replacement (access, true);
1907 return access->replacement_decl;
1910 /* Return ACCESS scalar replacement, create it if it does not exist yet but do
1911 not mark it for renaming. */
1914 get_unrenamed_access_replacement (struct access *access)
1916 gcc_assert (!access->grp_to_be_replaced);
1918 if (!access->replacement_decl)
1919 access->replacement_decl = create_access_replacement (access, false);
1920 return access->replacement_decl;
1924 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
1925 linked list along the way. Stop when *ACCESS is NULL or the access pointed
1926 to by it is not "within" the root. Return false iff some accesses partially
1930 build_access_subtree (struct access **access)
1932 struct access *root = *access, *last_child = NULL;
1933 HOST_WIDE_INT limit = root->offset + root->size;
1935 *access = (*access)->next_grp;
1936 while (*access && (*access)->offset + (*access)->size <= limit)
1939 root->first_child = *access;
1941 last_child->next_sibling = *access;
1942 last_child = *access;
1944 if (!build_access_subtree (access))
1948 if (*access && (*access)->offset < limit)
1954 /* Build a tree of access representatives, ACCESS is the pointer to the first
1955 one, others are linked in a list by the next_grp field. Return false iff
1956 some accesses partially overlap. */
1959 build_access_trees (struct access *access)
1963 struct access *root = access;
1965 if (!build_access_subtree (&access))
1967 root->next_grp = access;
1972 /* Return true if expr contains some ARRAY_REFs into a variable bounded
1976 expr_with_var_bounded_array_refs_p (tree expr)
1978 while (handled_component_p (expr))
1980 if (TREE_CODE (expr) == ARRAY_REF
1981 && !host_integerp (array_ref_low_bound (expr), 0))
1983 expr = TREE_OPERAND (expr, 0);
1988 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
1989 doing so seems beneficial and ALLOW_REPLACEMENTS allows it. Also set all
1990 sorts of access flags appropriately along the way, notably always set
1991 grp_read and grp_assign_read according to MARK_READ and grp_write when
1994 Creating a replacement for a scalar access is considered beneficial if its
1995 grp_hint is set (this means we are either attempting total scalarization or
1996 there is more than one direct read access) or according to the following
1999 Access written to through a scalar type (once or more times)
2001 | Written to in an assignment statement
2003 | | Access read as scalar _once_
2005 | | | Read in an assignment statement
2007 | | | | Scalarize Comment
2008 -----------------------------------------------------------------------------
2009 0 0 0 0   -     No access for the scalar
2010 0 0 0 1   -     No access for the scalar
2011 0 0 1 0   No    Single read - won't help
2012 0 0 1 1   No    The same case
2013 0 1 0 0   -     No access for the scalar
2014 0 1 0 1   -     No access for the scalar
2015 0 1 1 0   Yes   s = *g; return s.i;
2016 0 1 1 1   Yes   The same case as above
2017 1 0 0 0   No    Won't help
2018 1 0 0 1   Yes   s.i = 1; *g = s;
2019 1 0 1 0   Yes   s.i = 5; g = s.i;
2020 1 0 1 1   Yes   The same case as above
2021 1 1 0 0   No    Won't help.
2022 1 1 0 1   Yes   s.i = 1; *g = s;
2023 1 1 1 0   Yes   s = *g; return s.i;
2024 1 1 1 1   Yes   Any of the above yeses */
2027 analyze_access_subtree (struct access *root, struct access *parent,
2028 bool allow_replacements)
2030 struct access *child;
2031 HOST_WIDE_INT limit = root->offset + root->size;
2032 HOST_WIDE_INT covered_to = root->offset;
2033 bool scalar = is_gimple_reg_type (root->type);
2034 bool hole = false, sth_created = false;
2038 if (parent->grp_read)
2040 if (parent->grp_assignment_read)
2041 root->grp_assignment_read = 1;
2042 if (parent->grp_write)
2043 root->grp_write = 1;
2044 if (parent->grp_assignment_write)
2045 root->grp_assignment_write = 1;
2046 if (parent->grp_total_scalarization)
2047 root->grp_total_scalarization = 1;
2050 if (root->grp_unscalarizable_region)
2051 allow_replacements = false;
2053 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2054 allow_replacements = false;
2056 for (child = root->first_child; child; child = child->next_sibling)
2058 hole |= covered_to < child->offset;
2059 sth_created |= analyze_access_subtree (child, root,
2060 allow_replacements && !scalar);
2062 root->grp_unscalarized_data |= child->grp_unscalarized_data;
2063 root->grp_total_scalarization &= child->grp_total_scalarization;
2064 if (child->grp_covered)
2065 covered_to += child->size;
2070 if (allow_replacements && scalar && !root->first_child
2072 || ((root->grp_scalar_read || root->grp_assignment_read)
2073 && (root->grp_scalar_write || root->grp_assignment_write))))
2075 if (dump_file && (dump_flags & TDF_DETAILS))
2077 fprintf (dump_file, "Marking ");
2078 print_generic_expr (dump_file, root->base, 0);
2079 fprintf (dump_file, " offset: %u, size: %u: ",
2080 (unsigned) root->offset, (unsigned) root->size);
2081 fprintf (dump_file, " to be replaced.\n");
2084 root->grp_to_be_replaced = 1;
2090 if (covered_to < limit)
2093 root->grp_total_scalarization = 0;
2097 && (!hole || root->grp_total_scalarization))
2099 root->grp_covered = 1;
2102 if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
2103 root->grp_unscalarized_data = 1; /* not covered and written to */
2109 /* Analyze all access trees linked by next_grp by the means of
2110 analyze_access_subtree. */
2112 analyze_access_trees (struct access *access)
2118 if (analyze_access_subtree (access, NULL, true))
2120 access = access->next_grp;
2126 /* Return true iff a potential new child of LACC at offset NORM_OFFSET and with size
2127 SIZE would conflict with an already existing one. If exactly such a child
2128 already exists in LACC, store a pointer to it in EXACT_MATCH. */
2131 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2132 HOST_WIDE_INT size, struct access **exact_match)
2134 struct access *child;
2136 for (child = lacc->first_child; child; child = child->next_sibling)
2138 if (child->offset == norm_offset && child->size == size)
2140 *exact_match = child;
2144 if (child->offset < norm_offset + size
2145 && child->offset + child->size > norm_offset)
2152 /* Create a new child access of PARENT, with all properties just like MODEL
2153 except for its offset and with its grp_write false and grp_read true.
2154 Return the new access or NULL if it cannot be created. Note that this access
is created long after all splicing and sorting; it is not located in any
2156 access vector and is automatically a representative of its group. */
2158 static struct access *
2159 create_artificial_child_access (struct access *parent, struct access *model,
2160 HOST_WIDE_INT new_offset)
2162 struct access *access;
2163 struct access **child;
2164 tree expr = parent->base;
2166 gcc_assert (!model->grp_unscalarizable_region);
2168 access = (struct access *) pool_alloc (access_pool);
2169 memset (access, 0, sizeof (struct access));
2170 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2173 access->grp_no_warning = true;
2174 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2175 new_offset, model, NULL, false);
2178 access->base = parent->base;
2179 access->expr = expr;
2180 access->offset = new_offset;
2181 access->size = model->size;
2182 access->type = model->type;
2183 access->grp_write = true;
2184 access->grp_read = false;
2186 child = &parent->first_child;
2187 while (*child && (*child)->offset < new_offset)
2188 child = &(*child)->next_sibling;
2190 access->next_sibling = *child;
2197 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
2198 true if any new subaccess was created. Additionally, if RACC is a scalar
2199 access but LACC is not, change the type of the latter, if possible. */
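/* A hypothetical example of the propagation: in

struct S { int i; int j; } a, b;
...
b.i = 1;
a = b;

the access to b.i is propagated across the assign link created for "a = b",
so an artificial child access for the corresponding part of a is created as
well (see create_artificial_child_access below) and the aggregate copy can
later be carried out component-wise through scalar replacements if the
analysis decides to create them. */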
2202 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2204 struct access *rchild;
2205 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2208 if (is_gimple_reg_type (lacc->type)
2209 || lacc->grp_unscalarizable_region
2210 || racc->grp_unscalarizable_region)
2213 if (!lacc->first_child && !racc->first_child
2214 && is_gimple_reg_type (racc->type))
2216 tree t = lacc->base;
2218 lacc->type = racc->type;
2219 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t), lacc->offset,
2224 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2225 lacc->base, lacc->offset,
2227 lacc->grp_no_warning = true;
2232 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2234 struct access *new_acc = NULL;
2235 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2237 if (rchild->grp_unscalarizable_region)
2240 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2245 rchild->grp_hint = 1;
2246 new_acc->grp_hint |= new_acc->grp_read;
2247 if (rchild->first_child)
2248 ret |= propagate_subaccesses_across_link (new_acc, rchild);
2253 rchild->grp_hint = 1;
2254 new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2258 if (racc->first_child)
2259 propagate_subaccesses_across_link (new_acc, rchild);
2266 /* Propagate all subaccesses across assignment links. */
2269 propagate_all_subaccesses (void)
2271 while (work_queue_head)
2273 struct access *racc = pop_access_from_work_queue ();
2274 struct assign_link *link;
2276 gcc_assert (racc->first_link);
2278 for (link = racc->first_link; link; link = link->next)
2280 struct access *lacc = link->lacc;
2282 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2284 lacc = lacc->group_representative;
2285 if (propagate_subaccesses_across_link (lacc, racc)
2286 && lacc->first_link)
2287 add_access_to_work_queue (lacc);
2292 /* Go through all accesses collected throughout the (intraprocedural) analysis
2293 stage, exclude overlapping ones, identify representatives and build trees
2294 out of them, making decisions about scalarization on the way. Return true
2295 iff there are any to-be-scalarized variables after this stage. */
2298 analyze_all_variable_accesses (void)
2301 bitmap tmp = BITMAP_ALLOC (NULL);
2303 unsigned i, max_total_scalarization_size;
2305 max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
2306 * MOVE_RATIO (optimize_function_for_speed_p (cfun));
2308 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2309 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2310 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2312 tree var = referenced_var (i);
2314 if (TREE_CODE (var) == VAR_DECL
2315 && type_consists_of_records_p (TREE_TYPE (var)))
2317 if ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
2318 <= max_total_scalarization_size)
2320 completely_scalarize_var (var);
2321 if (dump_file && (dump_flags & TDF_DETAILS))
2323 fprintf (dump_file, "Will attempt to totally scalarize ");
2324 print_generic_expr (dump_file, var, 0);
2325 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2328 else if (dump_file && (dump_flags & TDF_DETAILS))
2330 fprintf (dump_file, "Too big to totally scalarize: ");
2331 print_generic_expr (dump_file, var, 0);
2332 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
2337 bitmap_copy (tmp, candidate_bitmap);
2338 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2340 tree var = referenced_var (i);
2341 struct access *access;
2343 access = sort_and_splice_var_accesses (var);
2344 if (!access || !build_access_trees (access))
2345 disqualify_candidate (var,
2346 "No or inhibitingly overlapping accesses.");
2349 propagate_all_subaccesses ();
2351 bitmap_copy (tmp, candidate_bitmap);
2352 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2354 tree var = referenced_var (i);
2355 struct access *access = get_first_repr_for_decl (var);
2357 if (analyze_access_trees (access))
2360 if (dump_file && (dump_flags & TDF_DETAILS))
2362 fprintf (dump_file, "\nAccess trees for ");
2363 print_generic_expr (dump_file, var, 0);
2364 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2365 dump_access_tree (dump_file, access);
2366 fprintf (dump_file, "\n");
2370 disqualify_candidate (var, "No scalar replacements to be created.");
2377 statistics_counter_event (cfun, "Scalarized aggregates", res);
2384 /* Generate statements copying scalar replacements of accesses within a subtree
2385 into or out of AGG. ACCESS, all its children, siblings and their children
2386 are to be processed. AGG is an aggregate type expression (can be a
declaration but does not have to be; it can, for example, also be a mem_ref or
2388 a series of handled components). TOP_OFFSET is the offset of the processed
2389 subtree which has to be subtracted from offsets of individual accesses to
2390 get corresponding offsets for AGG. If CHUNK_SIZE is non-null, copy only
2391 replacements in the interval <start_offset, start_offset + chunk_size>,
2392 otherwise copy all. GSI is a statement iterator used to place the new
2393 statements. WRITE should be true when the statements should write from AGG
to the replacement and false in the opposite case. If INSERT_AFTER is true,
new statements will be added after the current statement in GSI; otherwise
they will be added before it. */
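/* For instance (an illustrative sketch, with SR_i and SR_f standing for the
created scalar replacements): if AGG is a candidate variable s whose
components s.i and s.f are to be replaced, a call with WRITE being true emits

SR_i = s.i;
SR_f = s.f;

whereas with WRITE being false the copies go in the opposite direction,
i.e. s.i = SR_i; and s.f = SR_f;. */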
2399 generate_subtree_copies (struct access *access, tree agg,
2400 HOST_WIDE_INT top_offset,
2401 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2402 gimple_stmt_iterator *gsi, bool write,
2403 bool insert_after, location_t loc)
2407 if (chunk_size && access->offset >= start_offset + chunk_size)
2410 if (access->grp_to_be_replaced
2412 || access->offset + access->size > start_offset))
2414 tree expr, repl = get_access_replacement (access);
2417 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2418 access, gsi, insert_after);
2422 if (access->grp_partial_lhs)
2423 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2425 insert_after ? GSI_NEW_STMT
2427 stmt = gimple_build_assign (repl, expr);
2431 TREE_NO_WARNING (repl) = 1;
2432 if (access->grp_partial_lhs)
2433 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2435 insert_after ? GSI_NEW_STMT
2437 stmt = gimple_build_assign (expr, repl);
2439 gimple_set_location (stmt, loc);
2442 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2444 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2446 sra_stats.subtree_copies++;
2449 if (access->first_child)
2450 generate_subtree_copies (access->first_child, agg, top_offset,
2451 start_offset, chunk_size, gsi,
2452 write, insert_after, loc);
2454 access = access->next_sibling;
2459 /* Assign zero to all scalar replacements in an access subtree. ACCESS is the
root of the subtree to be processed. GSI is the statement iterator used
2461 for inserting statements which are added after the current statement if
2462 INSERT_AFTER is true or before it otherwise. */
2465 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2466 bool insert_after, location_t loc)
2469 struct access *child;
2471 if (access->grp_to_be_replaced)
2475 stmt = gimple_build_assign (get_access_replacement (access),
2476 build_zero_cst (access->type));
2478 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2480 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2482 gimple_set_location (stmt, loc);
2485 for (child = access->first_child; child; child = child->next_sibling)
2486 init_subtree_with_zero (child, gsi, insert_after, loc);
2489 /* Search for an access representative for the given expression EXPR and
2490 return it or NULL if it cannot be found. */
2492 static struct access *
2493 get_access_for_expr (tree expr)
2495 HOST_WIDE_INT offset, size, max_size;
2498 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2499 a different size than the size of its argument and we need the latter
2501 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2502 expr = TREE_OPERAND (expr, 0);
2504 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
2505 if (max_size == -1 || !DECL_P (base))
2508 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2511 return get_var_base_offset_size_access (base, offset, max_size);
2514 /* Replace the expression EXPR with a scalar replacement if there is one and
2515 generate other statements to do type conversion or subtree copying if
2516 necessary. GSI is used to place newly created statements, WRITE is true if
2517 the expression is being written to (it is on a LHS of a statement or output
2518 in an assembly statement). */
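/* A rough illustration (hypothetical example): when s.i has a scalar
replacement SR_i, "return s.i;" simply becomes "return SR_i;" and a call
argument "foo (s.i)" becomes "foo (SR_i)"; if the type of the original
expression differs from the type of the replacement, the replacement is
synchronized with the original aggregate reference by an extra copy, as
described in the comment inside the function. */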
2521 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
2524 struct access *access;
2527 if (TREE_CODE (*expr) == BIT_FIELD_REF)
2530 expr = &TREE_OPERAND (*expr, 0);
2535 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2536 expr = &TREE_OPERAND (*expr, 0);
2537 access = get_access_for_expr (*expr);
2540 type = TREE_TYPE (*expr);
2542 loc = gimple_location (gsi_stmt (*gsi));
2543 if (access->grp_to_be_replaced)
2545 tree repl = get_access_replacement (access);
2546 /* If we replace a non-register typed access simply use the original
2547 access expression to extract the scalar component afterwards.
2548 This happens if scalarizing a function return value or parameter
2549 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2550 gcc.c-torture/compile/20011217-1.c.
2552 We also want to use this when accessing a complex or vector which can
2553 be accessed as a different type too, potentially creating a need for
2554 type conversion (see PR42196) and when scalarized unions are involved
2555 in assembler statements (see PR42398). */
2556 if (!useless_type_conversion_p (type, access->type))
2560 ref = build_ref_for_model (loc, access->base, access->offset, access,
2567 if (access->grp_partial_lhs)
2568 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2569 false, GSI_NEW_STMT);
2570 stmt = gimple_build_assign (repl, ref);
2571 gimple_set_location (stmt, loc);
2572 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2578 if (access->grp_partial_lhs)
2579 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2580 true, GSI_SAME_STMT);
2581 stmt = gimple_build_assign (ref, repl);
2582 gimple_set_location (stmt, loc);
2583 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2591 if (access->first_child)
2593 HOST_WIDE_INT start_offset, chunk_size;
2595 && host_integerp (TREE_OPERAND (bfr, 1), 1)
2596 && host_integerp (TREE_OPERAND (bfr, 2), 1))
2598 chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
2599 start_offset = access->offset
2600 + tree_low_cst (TREE_OPERAND (bfr, 2), 1);
2603 start_offset = chunk_size = 0;
2605 generate_subtree_copies (access->first_child, access->base, 0,
2606 start_offset, chunk_size, gsi, write, write,
2612 /* Where scalar replacements of the RHS have been written to when a replacement
of a LHS of an assignment cannot be directly loaded from a replacement of
2615 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
2616 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2617 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
2619 /* Store all replacements in the access tree rooted in TOP_RACC either to their
2620 base aggregate if there are unscalarized data or directly to LHS of the
2621 statement that is pointed to by GSI otherwise. */
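/* Sketch (assuming GSI points to an assignment lhs = rhs where rhs is the
base of TOP_RACC): if rhs contains data without a scalar replacement, the
existing replacements are first flushed back into rhs, e.g. "rhs.i = SR_i;",
and SRA_UDH_RIGHT is returned so that the original aggregate copy is kept;
otherwise the replacements are stored directly into lhs and SRA_UDH_LEFT is
returned. */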
2623 static enum unscalarized_data_handling
2624 handle_unscalarized_data_in_subtree (struct access *top_racc,
2625 gimple_stmt_iterator *gsi)
2627 if (top_racc->grp_unscalarized_data)
2629 generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
2631 gimple_location (gsi_stmt (*gsi)));
2632 return SRA_UDH_RIGHT;
2636 tree lhs = gimple_assign_lhs (gsi_stmt (*gsi));
2637 generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
2638 0, 0, gsi, false, false,
2639 gimple_location (gsi_stmt (*gsi)));
2640 return SRA_UDH_LEFT;
2645 /* Try to generate statements to load all sub-replacements in an access subtree
2646 formed by children of LACC from scalar replacements in the TOP_RACC subtree.
2647 If that is not possible, refresh the TOP_RACC base aggregate and load the
2648 accesses from it. LEFT_OFFSET is the offset of the left whole subtree being
2649 copied. NEW_GSI is stmt iterator used for statement insertions after the
2650 original assignment, OLD_GSI is used to insert statements before the
2651 assignment. *REFRESHED keeps the information whether we have needed to
refresh replacements of the LHS and from which side of the assignment this
2656 load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
2657 HOST_WIDE_INT left_offset,
2658 gimple_stmt_iterator *old_gsi,
2659 gimple_stmt_iterator *new_gsi,
2660 enum unscalarized_data_handling *refreshed)
2662 location_t loc = gimple_location (gsi_stmt (*old_gsi));
2663 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
2665 if (lacc->grp_to_be_replaced)
2667 struct access *racc;
2668 HOST_WIDE_INT offset = lacc->offset - left_offset + top_racc->offset;
2672 racc = find_access_in_subtree (top_racc, offset, lacc->size);
2673 if (racc && racc->grp_to_be_replaced)
2675 rhs = get_access_replacement (racc);
2676 if (!useless_type_conversion_p (lacc->type, racc->type))
2677 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, lacc->type, rhs);
2681 /* No suitable access on the right hand side, need to load from
2682 the aggregate. See if we have to update it first... */
2683 if (*refreshed == SRA_UDH_NONE)
2684 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2687 if (*refreshed == SRA_UDH_LEFT)
2688 rhs = build_ref_for_model (loc, lacc->base, lacc->offset, lacc,
2691 rhs = build_ref_for_model (loc, top_racc->base, offset, lacc,
2695 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
2696 gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
2697 gimple_set_location (stmt, loc);
2699 sra_stats.subreplacements++;
2701 else if (*refreshed == SRA_UDH_NONE
2702 && lacc->grp_read && !lacc->grp_covered)
2703 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2706 if (lacc->first_child)
2707 load_assign_lhs_subreplacements (lacc, top_racc, left_offset,
2708 old_gsi, new_gsi, refreshed);
2712 /* Result code for SRA assignment modification. */
2713 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
2714 SRA_AM_MODIFIED, /* stmt changed but not
2716 SRA_AM_REMOVED }; /* stmt eliminated */
2718 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
2719 to the assignment and GSI is the statement iterator pointing at it. Returns
2720 the same values as sra_modify_assign. */
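/* For example (illustrative only), if s is fully covered by scalar
replacements SR_i and SR_f, a GIMPLE assignment from an empty constructor,
"s = {};", is replaced by "SR_i = 0; SR_f = 0.0;" and removed; if s is not
fully covered, the zero-initializations are added after the statement and
the statement itself is kept. */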
2722 static enum assignment_mod_result
2723 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2725 tree lhs = gimple_assign_lhs (*stmt);
2729 acc = get_access_for_expr (lhs);
2733 loc = gimple_location (*stmt);
2734 if (VEC_length (constructor_elt,
2735 CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
2737 /* I have never seen this code path trigger but if it can happen the
2738 following should handle it gracefully. */
2739 if (access_has_children_p (acc))
2740 generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
2742 return SRA_AM_MODIFIED;
2745 if (acc->grp_covered)
2747 init_subtree_with_zero (acc, gsi, false, loc);
2748 unlink_stmt_vdef (*stmt);
2749 gsi_remove (gsi, true);
2750 return SRA_AM_REMOVED;
2754 init_subtree_with_zero (acc, gsi, true, loc);
2755 return SRA_AM_MODIFIED;
2759 /* Create and return a new suitable default definition SSA_NAME for RACC which
2760 is an access describing an uninitialized part of an aggregate that is being
2764 get_repl_default_def_ssa_name (struct access *racc)
2768 decl = get_unrenamed_access_replacement (racc);
2770 repl = gimple_default_def (cfun, decl);
2773 repl = make_ssa_name (decl, gimple_build_nop ());
2774 set_default_def (decl, repl);
2780 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
2784 contains_bitfld_comp_ref_p (const_tree ref)
2786 while (handled_component_p (ref))
2788 if (TREE_CODE (ref) == COMPONENT_REF
2789 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
2791 ref = TREE_OPERAND (ref, 0);
/* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
2798 bit-field field declaration somewhere in it. */
2801 contains_vce_or_bfcref_p (const_tree ref)
2803 while (handled_component_p (ref))
2805 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
2806 || (TREE_CODE (ref) == COMPONENT_REF
2807 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
2809 ref = TREE_OPERAND (ref, 0);
2815 /* Examine both sides of the assignment statement pointed to by STMT, replace
them with a scalar replacement if there is one and generate copying of
2817 replacements if scalarized aggregates have been used in the assignment. GSI
2818 is used to hold generated statements for type conversions and subtree
2821 static enum assignment_mod_result
2822 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2824 struct access *lacc, *racc;
2826 bool modify_this_stmt = false;
2827 bool force_gimple_rhs = false;
2829 gimple_stmt_iterator orig_gsi = *gsi;
2831 if (!gimple_assign_single_p (*stmt))
2833 lhs = gimple_assign_lhs (*stmt);
2834 rhs = gimple_assign_rhs1 (*stmt);
2836 if (TREE_CODE (rhs) == CONSTRUCTOR)
2837 return sra_modify_constructor_assign (stmt, gsi);
2839 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
2840 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
2841 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
2843 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
2845 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
2847 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
2850 lacc = get_access_for_expr (lhs);
2851 racc = get_access_for_expr (rhs);
2855 loc = gimple_location (*stmt);
2856 if (lacc && lacc->grp_to_be_replaced)
2858 lhs = get_access_replacement (lacc);
2859 gimple_assign_set_lhs (*stmt, lhs);
2860 modify_this_stmt = true;
2861 if (lacc->grp_partial_lhs)
2862 force_gimple_rhs = true;
2866 if (racc && racc->grp_to_be_replaced)
2868 rhs = get_access_replacement (racc);
2869 modify_this_stmt = true;
2870 if (racc->grp_partial_lhs)
2871 force_gimple_rhs = true;
2875 if (modify_this_stmt)
2877 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2879 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
2880 ??? This should move to fold_stmt which we simply should
2881 call after building a VIEW_CONVERT_EXPR here. */
2882 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
2883 && !contains_bitfld_comp_ref_p (lhs)
2884 && !access_has_children_p (lacc))
2886 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
2887 gimple_assign_set_lhs (*stmt, lhs);
2889 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
2890 && !contains_vce_or_bfcref_p (rhs)
2891 && !access_has_children_p (racc))
2892 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
2894 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2896 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
2898 if (is_gimple_reg_type (TREE_TYPE (lhs))
2899 && TREE_CODE (lhs) != SSA_NAME)
2900 force_gimple_rhs = true;
/* From this point on, the function deals with assignments between aggregates
when at least one has scalar reductions of some of its components. There
are three possible scenarios: 1) both the LHS and the RHS have
to-be-scalarized components, 2) only the RHS has or 3) only the LHS has.
2910 In the first case, we would like to load the LHS components from RHS
2911 components whenever possible. If that is not possible, we would like to
2912 read it directly from the RHS (after updating it by storing in it its own
2913 components). If there are some necessary unscalarized data in the LHS,
2914 those will be loaded by the original assignment too. If neither of these
2915 cases happen, the original statement can be removed. Most of this is done
2916 by load_assign_lhs_subreplacements.
2918 In the second case, we would like to store all RHS scalarized components
2919 directly into LHS and if they cover the aggregate completely, remove the
2920 statement too. In the third case, we want the LHS components to be loaded
2921 directly from the RHS (DSE will remove the original statement if it
2924 This is a bit complex but manageable when types match and when unions do
2925 not cause confusion in a way that we cannot really load a component of LHS
2926 from the RHS or vice versa (the access representing this level can have
2927 subaccesses that are accessible only through a different union field at a
2928 higher level - different from the one used in the examined expression).
2931 Therefore, I specially handle a fourth case, happening when there is a
2932 specific type cast or it is impossible to locate a scalarized subaccess on
2933 the other side of the expression. If that happens, I simply "refresh" the
RHS by storing into it its scalarized components, leave the original
statement there to do the copying and then load the scalar replacements of
the LHS.
2936 This is what the first branch does. */
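/* A small illustration of the third scenario (hypothetical): for

struct S { int i; int j; } l;
l = *p;

where only l has scalar replacements, the loads "SR_i = p->i;" and
"SR_j = p->j;" are added after the copy; the copy itself is left in place
and later removed by DSE if it turns out to be dead. */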
2938 if (modify_this_stmt
2939 || gimple_has_volatile_ops (*stmt)
2940 || contains_vce_or_bfcref_p (rhs)
2941 || contains_vce_or_bfcref_p (lhs))
2943 if (access_has_children_p (racc))
2944 generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
2945 gsi, false, false, loc);
2946 if (access_has_children_p (lacc))
2947 generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
2948 gsi, true, true, loc);
2949 sra_stats.separate_lhs_rhs_handling++;
2953 if (access_has_children_p (lacc) && access_has_children_p (racc))
2955 gimple_stmt_iterator orig_gsi = *gsi;
2956 enum unscalarized_data_handling refreshed;
2958 if (lacc->grp_read && !lacc->grp_covered)
2959 refreshed = handle_unscalarized_data_in_subtree (racc, gsi);
2961 refreshed = SRA_UDH_NONE;
2963 load_assign_lhs_subreplacements (lacc, racc, lacc->offset,
2964 &orig_gsi, gsi, &refreshed);
2965 if (refreshed != SRA_UDH_RIGHT)
2968 unlink_stmt_vdef (*stmt);
2969 gsi_remove (&orig_gsi, true);
2970 sra_stats.deleted++;
2971 return SRA_AM_REMOVED;
2978 if (!racc->grp_to_be_replaced && !racc->grp_unscalarized_data)
2982 fprintf (dump_file, "Removing load: ");
2983 print_gimple_stmt (dump_file, *stmt, 0, 0);
2986 if (TREE_CODE (lhs) == SSA_NAME)
2988 rhs = get_repl_default_def_ssa_name (racc);
2989 if (!useless_type_conversion_p (TREE_TYPE (lhs),
2991 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
2992 TREE_TYPE (lhs), rhs);
2996 if (racc->first_child)
2997 generate_subtree_copies (racc->first_child, lhs,
2998 racc->offset, 0, 0, gsi,
3001 gcc_assert (*stmt == gsi_stmt (*gsi));
3002 unlink_stmt_vdef (*stmt);
3003 gsi_remove (gsi, true);
3004 sra_stats.deleted++;
3005 return SRA_AM_REMOVED;
3008 else if (racc->first_child)
3009 generate_subtree_copies (racc->first_child, lhs, racc->offset,
3010 0, 0, gsi, false, true, loc);
3012 if (access_has_children_p (lacc))
3013 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3014 0, 0, gsi, true, true, loc);
3018 /* This gimplification must be done after generate_subtree_copies, lest we
3019 insert the subtree copies in the middle of the gimplified sequence. */
3020 if (force_gimple_rhs)
3021 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3022 true, GSI_SAME_STMT);
3023 if (gimple_assign_rhs1 (*stmt) != rhs)
3025 modify_this_stmt = true;
3026 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3027 gcc_assert (*stmt == gsi_stmt (orig_gsi));
3030 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
/* Traverse the function body and perform all modifications as decided in
3034 analyze_all_variable_accesses. Return true iff the CFG has been
3038 sra_modify_function_body (void)
3040 bool cfg_changed = false;
3045 gimple_stmt_iterator gsi = gsi_start_bb (bb);
3046 while (!gsi_end_p (gsi))
3048 gimple stmt = gsi_stmt (gsi);
3049 enum assignment_mod_result assign_result;
3050 bool modified = false, deleted = false;
3054 switch (gimple_code (stmt))
3057 t = gimple_return_retval_ptr (stmt);
3058 if (*t != NULL_TREE)
3059 modified |= sra_modify_expr (t, &gsi, false);
3063 assign_result = sra_modify_assign (&stmt, &gsi);
3064 modified |= assign_result == SRA_AM_MODIFIED;
3065 deleted = assign_result == SRA_AM_REMOVED;
3069 /* Operands must be processed before the lhs. */
3070 for (i = 0; i < gimple_call_num_args (stmt); i++)
3072 t = gimple_call_arg_ptr (stmt, i);
3073 modified |= sra_modify_expr (t, &gsi, false);
3076 if (gimple_call_lhs (stmt))
3078 t = gimple_call_lhs_ptr (stmt);
3079 modified |= sra_modify_expr (t, &gsi, true);
3084 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
3086 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
3087 modified |= sra_modify_expr (t, &gsi, false);
3089 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
3091 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
3092 modified |= sra_modify_expr (t, &gsi, true);
3103 if (maybe_clean_eh_stmt (stmt)
3104 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3115 /* Generate statements initializing scalar replacements of parts of function
3119 initialize_parameter_reductions (void)
3121 gimple_stmt_iterator gsi;
3122 gimple_seq seq = NULL;
3125 for (parm = DECL_ARGUMENTS (current_function_decl);
3127 parm = DECL_CHAIN (parm))
3129 VEC (access_p, heap) *access_vec;
3130 struct access *access;
3132 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3134 access_vec = get_base_access_vector (parm);
3140 seq = gimple_seq_alloc ();
3141 gsi = gsi_start (seq);
3144 for (access = VEC_index (access_p, access_vec, 0);
3146 access = access->next_grp)
3147 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3148 EXPR_LOCATION (parm));
3152 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
3155 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
3156 it reveals there are components of some aggregates to be scalarized, it runs
3157 the required transformations. */
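/* As a whole, a run of the intraprocedural pass performs transformations of
the following kind (illustrative example only): in

struct S { int i; float f; } s;
s.i = 123;
return s.i;

s.i is both written and read as a scalar, so it gets a replacement SR_i and
the statements effectively become "SR_i = 123; return SR_i;" once dead code
is cleaned up. */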
3159 perform_intra_sra (void)
3164 if (!find_var_candidates ())
3167 if (!scan_function ())
3170 if (!analyze_all_variable_accesses ())
3173 if (sra_modify_function_body ())
3174 ret = TODO_update_ssa | TODO_cleanup_cfg;
3176 ret = TODO_update_ssa;
3177 initialize_parameter_reductions ();
3179 statistics_counter_event (cfun, "Scalar replacements created",
3180 sra_stats.replacements);
3181 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3182 statistics_counter_event (cfun, "Subtree copy stmts",
3183 sra_stats.subtree_copies);
3184 statistics_counter_event (cfun, "Subreplacement stmts",
3185 sra_stats.subreplacements);
3186 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3187 statistics_counter_event (cfun, "Separate LHS and RHS handling",
3188 sra_stats.separate_lhs_rhs_handling);
3191 sra_deinitialize ();
3195 /* Perform early intraprocedural SRA. */
3197 early_intra_sra (void)
3199 sra_mode = SRA_MODE_EARLY_INTRA;
3200 return perform_intra_sra ();
3203 /* Perform "late" intraprocedural SRA. */
3205 late_intra_sra (void)
3207 sra_mode = SRA_MODE_INTRA;
3208 return perform_intra_sra ();
3213 gate_intra_sra (void)
3215 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
3219 struct gimple_opt_pass pass_sra_early =
3224 gate_intra_sra, /* gate */
3225 early_intra_sra, /* execute */
3228 0, /* static_pass_number */
3229 TV_TREE_SRA, /* tv_id */
3230 PROP_cfg | PROP_ssa, /* properties_required */
3231 0, /* properties_provided */
3232 0, /* properties_destroyed */
3233 0, /* todo_flags_start */
3236 | TODO_verify_ssa /* todo_flags_finish */
3240 struct gimple_opt_pass pass_sra =
3245 gate_intra_sra, /* gate */
3246 late_intra_sra, /* execute */
3249 0, /* static_pass_number */
3250 TV_TREE_SRA, /* tv_id */
3251 PROP_cfg | PROP_ssa, /* properties_required */
3252 0, /* properties_provided */
3253 0, /* properties_destroyed */
3254 TODO_update_address_taken, /* todo_flags_start */
3257 | TODO_verify_ssa /* todo_flags_finish */
3262 /* Return true iff PARM (which must be a parm_decl) is an unused scalar
3266 is_unused_scalar_param (tree parm)
3269 return (is_gimple_reg (parm)
3270 && (!(name = gimple_default_def (cfun, parm))
3271 || has_zero_uses (name)));
3274 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3275 examine whether there are any direct or otherwise infeasible ones. If so,
3276 return true, otherwise return false. PARM must be a gimple register with a
3277 non-NULL default definition. */
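/* For example (hypothetical): in "int f (struct S *p) { return p->i; }" the
parameter is only dereferenced, has no direct uses and thus stays an IPA-SRA
candidate, whereas in "int g (struct S *p) { return h (p); }" passing p
itself to h is a direct use which disqualifies the parameter. */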
3280 ptr_parm_has_direct_uses (tree parm)
3282 imm_use_iterator ui;
3284 tree name = gimple_default_def (cfun, parm);
3287 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3290 use_operand_p use_p;
3292 if (is_gimple_debug (stmt))
3295 /* Valid uses include dereferences on the lhs and the rhs. */
3296 if (gimple_has_lhs (stmt))
3298 tree lhs = gimple_get_lhs (stmt);
3299 while (handled_component_p (lhs))
3300 lhs = TREE_OPERAND (lhs, 0);
3301 if (TREE_CODE (lhs) == MEM_REF
3302 && TREE_OPERAND (lhs, 0) == name
3303 && integer_zerop (TREE_OPERAND (lhs, 1))
3304 && types_compatible_p (TREE_TYPE (lhs),
3305 TREE_TYPE (TREE_TYPE (name))))
3308 if (gimple_assign_single_p (stmt))
3310 tree rhs = gimple_assign_rhs1 (stmt);
3311 while (handled_component_p (rhs))
3312 rhs = TREE_OPERAND (rhs, 0);
3313 if (TREE_CODE (rhs) == MEM_REF
3314 && TREE_OPERAND (rhs, 0) == name
3315 && integer_zerop (TREE_OPERAND (rhs, 1))
3316 && types_compatible_p (TREE_TYPE (rhs),
3317 TREE_TYPE (TREE_TYPE (name))))
3320 else if (is_gimple_call (stmt))
3323 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3325 tree arg = gimple_call_arg (stmt, i);
3326 while (handled_component_p (arg))
3327 arg = TREE_OPERAND (arg, 0);
3328 if (TREE_CODE (arg) == MEM_REF
3329 && TREE_OPERAND (arg, 0) == name
3330 && integer_zerop (TREE_OPERAND (arg, 1))
3331 && types_compatible_p (TREE_TYPE (arg),
3332 TREE_TYPE (TREE_TYPE (name))))
3337 /* If the number of valid uses does not match the number of
3338 uses in this stmt there is an unhandled use. */
3339 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3346 BREAK_FROM_IMM_USE_STMT (ui);
3352 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3353 them in candidate_bitmap. Note that these do not necessarily include
parameters which are unused and thus can be removed. Return true iff any
3355 such candidate has been found. */
3358 find_param_candidates (void)
3365 for (parm = DECL_ARGUMENTS (current_function_decl);
3367 parm = DECL_CHAIN (parm))
3369 tree type = TREE_TYPE (parm);
3373 if (TREE_THIS_VOLATILE (parm)
3374 || TREE_ADDRESSABLE (parm)
3375 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
3378 if (is_unused_scalar_param (parm))
3384 if (POINTER_TYPE_P (type))
3386 type = TREE_TYPE (type);
3388 if (TREE_CODE (type) == FUNCTION_TYPE
3389 || TYPE_VOLATILE (type)
3390 || (TREE_CODE (type) == ARRAY_TYPE
3391 && TYPE_NONALIASED_COMPONENT (type))
3392 || !is_gimple_reg (parm)
3393 || is_va_list_type (type)
3394 || ptr_parm_has_direct_uses (parm))
3397 else if (!AGGREGATE_TYPE_P (type))
3400 if (!COMPLETE_TYPE_P (type)
3401 || !host_integerp (TYPE_SIZE (type), 1)
3402 || tree_low_cst (TYPE_SIZE (type), 1) == 0
3403 || (AGGREGATE_TYPE_P (type)
3404 && type_internals_preclude_sra_p (type, &msg)))
3407 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
3409 if (dump_file && (dump_flags & TDF_DETAILS))
3411 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
3412 print_generic_expr (dump_file, parm, 0);
3413 fprintf (dump_file, "\n");
3417 func_param_count = count;
3421 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
3425 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
3428 struct access *repr = (struct access *) data;
3430 repr->grp_maybe_modified = 1;
3434 /* Analyze what representatives (in linked lists accessible from
3435 REPRESENTATIVES) can be modified by side effects of statements in the
3436 current function. */
3439 analyze_modified_params (VEC (access_p, heap) *representatives)
3443 for (i = 0; i < func_param_count; i++)
3445 struct access *repr;
3447 for (repr = VEC_index (access_p, representatives, i);
3449 repr = repr->next_grp)
3451 struct access *access;
3455 if (no_accesses_p (repr))
3457 if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
3458 || repr->grp_maybe_modified)
3461 ao_ref_init (&ar, repr->expr);
3462 visited = BITMAP_ALLOC (NULL);
3463 for (access = repr; access; access = access->next_sibling)
3465 /* All accesses are read ones, otherwise grp_maybe_modified would
3466 be trivially set. */
3467 walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
3468 mark_maybe_modified, repr, &visited);
3469 if (repr->grp_maybe_modified)
3472 BITMAP_FREE (visited);
3477 /* Propagate distances in bb_dereferences in the opposite direction than the
3478 control flow edges, in each step storing the maximum of the current value
3479 and the minimum of all successors. These steps are repeated until the table
stabilizes. Note that BBs which might terminate the function (according to
the final_bbs bitmap) are never updated in this way. */
3484 propagate_dereference_distances (void)
3486 VEC (basic_block, heap) *queue;
3489 queue = VEC_alloc (basic_block, heap, last_basic_block_for_function (cfun));
3490 VEC_quick_push (basic_block, queue, ENTRY_BLOCK_PTR);
3493 VEC_quick_push (basic_block, queue, bb);
3497 while (!VEC_empty (basic_block, queue))
3501 bool change = false;
3504 bb = VEC_pop (basic_block, queue);
3507 if (bitmap_bit_p (final_bbs, bb->index))
3510 for (i = 0; i < func_param_count; i++)
3512 int idx = bb->index * func_param_count + i;
3514 HOST_WIDE_INT inh = 0;
3516 FOR_EACH_EDGE (e, ei, bb->succs)
3518 int succ_idx = e->dest->index * func_param_count + i;
3520 if (e->src == EXIT_BLOCK_PTR)
3526 inh = bb_dereferences [succ_idx];
3528 else if (bb_dereferences [succ_idx] < inh)
3529 inh = bb_dereferences [succ_idx];
3532 if (!first && bb_dereferences[idx] < inh)
3534 bb_dereferences[idx] = inh;
3539 if (change && !bitmap_bit_p (final_bbs, bb->index))
3540 FOR_EACH_EDGE (e, ei, bb->preds)
3545 e->src->aux = e->src;
3546 VEC_quick_push (basic_block, queue, e->src);
3550 VEC_free (basic_block, heap, queue);
3553 /* Dump a dereferences TABLE with heading STR to file F. */
3556 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
3560 fprintf (dump_file, str);
3561 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
3563 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
3564 if (bb != EXIT_BLOCK_PTR)
3567 for (i = 0; i < func_param_count; i++)
3569 int idx = bb->index * func_param_count + i;
3570 fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
3575 fprintf (dump_file, "\n");
/* Determine which (parts of) parameters passed by reference that are not
3579 assigned to are not certainly dereferenced in this function and thus the
3580 dereferencing cannot be safely moved to the caller without potentially
3581 introducing a segfault. Mark such REPRESENTATIVES as
3582 grp_not_necessarilly_dereferenced.
The maximum dereferenced "distance," i.e. the offset + size of the accessed
part, is calculated for each pointer parameter rather than a simple boolean,
in order to handle cases when only a fraction of the whole
3587 aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
3590 The maximum dereference distances for each pointer parameter and BB are
already stored in bb_dereferences. This routine simply propagates these
3592 values upwards by propagate_dereference_distances and then compares the
3593 distances of individual parameters in the ENTRY BB to the equivalent
3594 distances of each representative of a (fraction of a) parameter. */
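/* A hypothetical example of why this matters: in

int f (struct S *p)
{
if (rare_condition)
return p->i;
return 0;
}

p->i is not dereferenced on every path through f, so turning the parameter
into one passed by value would make every caller load p->i unconditionally
and could introduce a segfault; such a representative is therefore marked
grp_not_necessarilly_dereferenced. */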
3597 analyze_caller_dereference_legality (VEC (access_p, heap) *representatives)
3601 if (dump_file && (dump_flags & TDF_DETAILS))
3602 dump_dereferences_table (dump_file,
3603 "Dereference table before propagation:\n",
3606 propagate_dereference_distances ();
3608 if (dump_file && (dump_flags & TDF_DETAILS))
3609 dump_dereferences_table (dump_file,
3610 "Dereference table after propagation:\n",
3613 for (i = 0; i < func_param_count; i++)
3615 struct access *repr = VEC_index (access_p, representatives, i);
3616 int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;
3618 if (!repr || no_accesses_p (repr))
3623 if ((repr->offset + repr->size) > bb_dereferences[idx])
3624 repr->grp_not_necessarilly_dereferenced = 1;
3625 repr = repr->next_grp;
3631 /* Return the representative access for the parameter declaration PARM if it is
3632 a scalar passed by reference which is not written to and the pointer value
3633 is not used directly. Thus, if it is legal to dereference it in the caller
and we can rule out modifications through aliases, such a parameter should be
3635 turned into one passed by value. Return NULL otherwise. */
3637 static struct access *
3638 unmodified_by_ref_scalar_representative (tree parm)
3640 int i, access_count;
3641 struct access *repr;
3642 VEC (access_p, heap) *access_vec;
3644 access_vec = get_base_access_vector (parm);
3645 gcc_assert (access_vec);
3646 repr = VEC_index (access_p, access_vec, 0);
3649 repr->group_representative = repr;
3651 access_count = VEC_length (access_p, access_vec);
3652 for (i = 1; i < access_count; i++)
3654 struct access *access = VEC_index (access_p, access_vec, i);
3657 access->group_representative = repr;
3658 access->next_sibling = repr->next_sibling;
3659 repr->next_sibling = access;
3663 repr->grp_scalar_ptr = 1;
3667 /* Return true iff this access precludes IPA-SRA of the parameter it is
3671 access_precludes_ipa_sra_p (struct access *access)
3673 /* Avoid issues such as the second simple testcase in PR 42025. The problem
is an incompatible assignment in a call statement (and possibly even in asm
3675 statements). This can be relaxed by using a new temporary but only for
3676 non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
3677 intraprocedural SRA we deal with this by keeping the old aggregate around,
3678 something we cannot do in IPA-SRA.) */
3680 && (is_gimple_call (access->stmt)
3681 || gimple_code (access->stmt) == GIMPLE_ASM))
3688 /* Sort collected accesses for parameter PARM, identify representatives for
3689 each accessed region and link them together. Return NULL if there are
different but overlapping accesses, the special ptr value meaning there are
no accesses for this parameter if that is the case, and the first
representative otherwise. Set *RO_GRP if there is a group of accesses
3693 with only read (i.e. no write) accesses. */
3695 static struct access *
3696 splice_param_accesses (tree parm, bool *ro_grp)
3698 int i, j, access_count, group_count;
3699 int agg_size, total_size = 0;
3700 struct access *access, *res, **prev_acc_ptr = &res;
3701 VEC (access_p, heap) *access_vec;
3703 access_vec = get_base_access_vector (parm);
3705 return &no_accesses_representant;
3706 access_count = VEC_length (access_p, access_vec);
3708 VEC_qsort (access_p, access_vec, compare_access_positions);
3713 while (i < access_count)
3717 access = VEC_index (access_p, access_vec, i);
3718 modification = access->write;
3719 if (access_precludes_ipa_sra_p (access))
3721 a1_alias_type = reference_alias_ptr_type (access->expr);
3723 /* Access is about to become group representative unless we find some
3724 nasty overlap which would preclude us from breaking this parameter
3728 while (j < access_count)
3730 struct access *ac2 = VEC_index (access_p, access_vec, j);
3731 if (ac2->offset != access->offset)
3733 /* All or nothing law for parameters. */
3734 if (access->offset + access->size > ac2->offset)
3739 else if (ac2->size != access->size)
3742 if (access_precludes_ipa_sra_p (ac2)
3743 || (ac2->type != access->type
3744 && (TREE_ADDRESSABLE (ac2->type)
3745 || TREE_ADDRESSABLE (access->type)))
3746 || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
3749 modification |= ac2->write;
3750 ac2->group_representative = access;
3751 ac2->next_sibling = access->next_sibling;
3752 access->next_sibling = ac2;
3757 access->grp_maybe_modified = modification;
3760 *prev_acc_ptr = access;
3761 prev_acc_ptr = &access->next_grp;
3762 total_size += access->size;
3766 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3767 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3769 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3770 if (total_size >= agg_size)
3773 gcc_assert (group_count > 0);
3777 /* Decide whether parameters with representative accesses given by REPR should
3778 be reduced into components. */
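/* E.g. (an illustrative sketch): for

void f (struct S *p) { use (p->a, p->b); }

where struct S has many fields but only a and b are read, and p is known to
be safely dereferenceable and unmodified, the parameter may be split into
two scalar parameters so that callers pass p->a and p->b by value, provided
the total size of the new parameters stays within the limits computed
below. */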
3781 decide_one_param_reduction (struct access *repr)
3783 int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
3788 cur_parm_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3789 gcc_assert (cur_parm_size > 0);
3791 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3794 agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3799 agg_size = cur_parm_size;
3805 fprintf (dump_file, "Evaluating PARAM group sizes for ");
3806 print_generic_expr (dump_file, parm, 0);
3807 fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
3808 for (acc = repr; acc; acc = acc->next_grp)
3809 dump_access (dump_file, acc, true);
3813 new_param_count = 0;
3815 for (; repr; repr = repr->next_grp)
3817 gcc_assert (parm == repr->base);
3819 /* Taking the address of a non-addressable field is verboten. */
3820 if (by_ref && repr->non_addressable)
3823 if (!by_ref || (!repr->grp_maybe_modified
3824 && !repr->grp_not_necessarilly_dereferenced))
3825 total_size += repr->size;
3827 total_size += cur_parm_size;
3832 gcc_assert (new_param_count > 0);
3834 if (optimize_function_for_size_p (cfun))
3835 parm_size_limit = cur_parm_size;
3837 parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
3840 if (total_size < agg_size
3841 && total_size <= parm_size_limit)
3844 fprintf (dump_file, " ....will be split into %i components\n",
3846 return new_param_count;
/* The order of the following enums is important; we need to do extra work for
3853 UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES. */
3854 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
3855 MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
3857 /* Identify representatives of all accesses to all candidate parameters for
3858 IPA-SRA. Return result based on what representatives have been found. */
3860 static enum ipa_splicing_result
3861 splice_all_param_accesses (VEC (access_p, heap) **representatives)
3863 enum ipa_splicing_result result = NO_GOOD_ACCESS;
3865 struct access *repr;
3867 *representatives = VEC_alloc (access_p, heap, func_param_count);
3869 for (parm = DECL_ARGUMENTS (current_function_decl);
3871 parm = DECL_CHAIN (parm))
3873 if (is_unused_scalar_param (parm))
3875 VEC_quick_push (access_p, *representatives,
3876 &no_accesses_representant);
3877 if (result == NO_GOOD_ACCESS)
3878 result = UNUSED_PARAMS;
3880 else if (POINTER_TYPE_P (TREE_TYPE (parm))
3881 && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
3882 && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3884 repr = unmodified_by_ref_scalar_representative (parm);
3885 VEC_quick_push (access_p, *representatives, repr);
3887 result = UNMODIF_BY_REF_ACCESSES;
3889 else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3891 bool ro_grp = false;
3892 repr = splice_param_accesses (parm, &ro_grp);
3893 VEC_quick_push (access_p, *representatives, repr);
3895 if (repr && !no_accesses_p (repr))
3897 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3900 result = UNMODIF_BY_REF_ACCESSES;
3901 else if (result < MODIF_BY_REF_ACCESSES)
3902 result = MODIF_BY_REF_ACCESSES;
3904 else if (result < BY_VAL_ACCESSES)
3905 result = BY_VAL_ACCESSES;
3907 else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
3908 result = UNUSED_PARAMS;
3911 VEC_quick_push (access_p, *representatives, NULL);
3914 if (result == NO_GOOD_ACCESS)
3916 VEC_free (access_p, heap, *representatives);
3917 *representatives = NULL;
3918 return NO_GOOD_ACCESS;
3924 /* Return the index of BASE in PARMS. Abort if it is not found. */
3927 get_param_index (tree base, VEC(tree, heap) *parms)
3931 len = VEC_length (tree, parms);
3932 for (i = 0; i < len; i++)
3933 if (VEC_index (tree, parms, i) == base)
3938 /* Convert the decisions made at the representative level into compact
3939 parameter adjustments. REPRESENTATIVES are pointers to first
representatives of each parameter's accesses, ADJUSTMENTS_COUNT is the expected
3941 final number of adjustments. */
3943 static ipa_parm_adjustment_vec
3944 turn_representatives_into_adjustments (VEC (access_p, heap) *representatives,
3945 int adjustments_count)
3947 VEC (tree, heap) *parms;
3948 ipa_parm_adjustment_vec adjustments;
3952 gcc_assert (adjustments_count > 0);
3953 parms = ipa_get_vector_of_formal_parms (current_function_decl);
3954 adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, adjustments_count);
3955 parm = DECL_ARGUMENTS (current_function_decl);
3956 for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
3958 struct access *repr = VEC_index (access_p, representatives, i);
3960 if (!repr || no_accesses_p (repr))
3962 struct ipa_parm_adjustment *adj;
3964 adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
3965 memset (adj, 0, sizeof (*adj));
3966 adj->base_index = get_param_index (parm, parms);
3969 adj->copy_param = 1;
3971 adj->remove_param = 1;
3975 struct ipa_parm_adjustment *adj;
3976 int index = get_param_index (parm, parms);
3978 for (; repr; repr = repr->next_grp)
3980 adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
3981 memset (adj, 0, sizeof (*adj));
3982 gcc_assert (repr->base == parm);
3983 adj->base_index = index;
3984 adj->base = repr->base;
3985 adj->type = repr->type;
3986 adj->alias_ptr_type = reference_alias_ptr_type (repr->expr);
3987 adj->offset = repr->offset;
3988 adj->by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
3989 && (repr->grp_maybe_modified
3990 || repr->grp_not_necessarilly_dereferenced));
3995 VEC_free (tree, heap, parms);
/* Analyze the collected accesses and produce a plan of what to do with the
parameters in the form of adjustments, NULL meaning nothing is to be done. */
4002 static ipa_parm_adjustment_vec
4003 analyze_all_param_acesses (void)
4005 enum ipa_splicing_result repr_state;
4006 bool proceed = false;
4007 int i, adjustments_count = 0;
4008 VEC (access_p, heap) *representatives;
4009 ipa_parm_adjustment_vec adjustments;
4011 repr_state = splice_all_param_accesses (&representatives);
4012 if (repr_state == NO_GOOD_ACCESS)
4015 /* If there are any parameters passed by reference which are not modified
4016 directly, we need to check whether they can be modified indirectly. */
4017 if (repr_state == UNMODIF_BY_REF_ACCESSES)
4019 analyze_caller_dereference_legality (representatives);
4020 analyze_modified_params (representatives);
4023 for (i = 0; i < func_param_count; i++)
4025 struct access *repr = VEC_index (access_p, representatives, i);
4027 if (repr && !no_accesses_p (repr))
4029 if (repr->grp_scalar_ptr)
4031 adjustments_count++;
4032 if (repr->grp_not_necessarilly_dereferenced
4033 || repr->grp_maybe_modified)
4034 VEC_replace (access_p, representatives, i, NULL);
4038 sra_stats.scalar_by_ref_to_by_val++;
4043 int new_components = decide_one_param_reduction (repr);
4045 if (new_components == 0)
4047 VEC_replace (access_p, representatives, i, NULL);
4048 adjustments_count++;
4052 adjustments_count += new_components;
4053 sra_stats.aggregate_params_reduced++;
4054 sra_stats.param_reductions_created += new_components;
4061 if (no_accesses_p (repr))
4064 sra_stats.deleted_unused_parameters++;
4066 adjustments_count++;
4070 if (!proceed && dump_file)
4071 fprintf (dump_file, "NOT proceeding to change params.\n");
4074 adjustments = turn_representatives_into_adjustments (representatives,
4079 VEC_free (access_p, heap, representatives);
4083 /* If a parameter replacement identified by ADJ does not yet exist in the form
of a declaration, create it and record it, otherwise return the previously
4088 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
4091 if (!adj->new_ssa_base)
4093 char *pretty_name = make_fancy_name (adj->base);
4095 repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
4096 DECL_NAME (repl) = get_identifier (pretty_name);
4097 obstack_free (&name_obstack, pretty_name);
4100 add_referenced_var (repl);
4101 adj->new_ssa_base = repl;
4104 repl = adj->new_ssa_base;
4108 /* Find the first adjustment for a particular parameter BASE in a vector of
4109 ADJUSTMENTS which is not a copy_param. Return NULL if there is no such
4112 static struct ipa_parm_adjustment *
4113 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
4117 len = VEC_length (ipa_parm_adjustment_t, adjustments);
4118 for (i = 0; i < len; i++)
4120 struct ipa_parm_adjustment *adj;
4122 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
4123 if (!adj->copy_param && adj->base == base)
4130 /* If the statement STMT defines an SSA_NAME of a parameter which is to be
removed because its value is not used, replace the SSA_NAME with one
relating to a created VAR_DECL, together with all of its uses, and return true.
4133 ADJUSTMENTS is a pointer to an adjustments vector. */
4136 replace_removed_params_ssa_names (gimple stmt,
4137 ipa_parm_adjustment_vec adjustments)
4139 struct ipa_parm_adjustment *adj;
4140 tree lhs, decl, repl, name;
4142 if (gimple_code (stmt) == GIMPLE_PHI)
4143 lhs = gimple_phi_result (stmt);
4144 else if (is_gimple_assign (stmt))
4145 lhs = gimple_assign_lhs (stmt);
4146 else if (is_gimple_call (stmt))
4147 lhs = gimple_call_lhs (stmt);
4151 if (TREE_CODE (lhs) != SSA_NAME)
4153 decl = SSA_NAME_VAR (lhs);
4154 if (TREE_CODE (decl) != PARM_DECL)
4157 adj = get_adjustment_for_base (adjustments, decl);
4161 repl = get_replaced_param_substitute (adj);
4162 name = make_ssa_name (repl, stmt);
4166 fprintf (dump_file, "replacing an SSA name of a removed param ");
4167 print_generic_expr (dump_file, lhs, 0);
4168 fprintf (dump_file, " with ");
4169 print_generic_expr (dump_file, name, 0);
4170 fprintf (dump_file, "\n");
4173 if (is_gimple_assign (stmt))
4174 gimple_assign_set_lhs (stmt, name);
4175 else if (is_gimple_call (stmt))
4176 gimple_call_set_lhs (stmt, name);
4178 gimple_phi_set_result (stmt, name);
4180 replace_uses_by (lhs, name);
4181 release_ssa_name (lhs);
4185 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4186 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
specifies whether the function should care about type incompatibility between the
4188 current and new expressions. If it is false, the function will leave
4189 incompatibility issues to the caller. Return true iff the expression
4193 sra_ipa_modify_expr (tree *expr, bool convert,
4194 ipa_parm_adjustment_vec adjustments)
4197 struct ipa_parm_adjustment *adj, *cand = NULL;
4198 HOST_WIDE_INT offset, size, max_size;
4201 len = VEC_length (ipa_parm_adjustment_t, adjustments);
4203 if (TREE_CODE (*expr) == BIT_FIELD_REF
4204 || TREE_CODE (*expr) == IMAGPART_EXPR
4205 || TREE_CODE (*expr) == REALPART_EXPR)
4207 expr = &TREE_OPERAND (*expr, 0);
4211 base = get_ref_base_and_extent (*expr, &offset, &size, &max_size);
4212 if (!base || size == -1 || max_size == -1)
4215 if (TREE_CODE (base) == MEM_REF)
4217 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
4218 base = TREE_OPERAND (base, 0);
4221 base = get_ssa_base_param (base);
4222 if (!base || TREE_CODE (base) != PARM_DECL)
4225 for (i = 0; i < len; i++)
4227 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
4229 if (adj->base == base &&
4230 (adj->offset == offset || adj->remove_param))
4236 if (!cand || cand->copy_param || cand->remove_param)
4240 src = build_simple_mem_ref (cand->reduction);
4242 src = cand->reduction;
4244 if (dump_file && (dump_flags & TDF_DETAILS))
4246 fprintf (dump_file, "About to replace expr ");
4247 print_generic_expr (dump_file, *expr, 0);
4248 fprintf (dump_file, " with ");
4249 print_generic_expr (dump_file, src, 0);
4250 fprintf (dump_file, "\n");
4253 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4255 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4263 /* If the statement pointed to by STMT_PTR contains any expressions that need
to be replaced with a different one as noted by ADJUSTMENTS, do so. Handle any
4265 potential type incompatibilities (GSI is used to accommodate conversion
4266 statements and must point to the statement). Return true iff the statement
4270 sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi,
4271 ipa_parm_adjustment_vec adjustments)
4273 gimple stmt = *stmt_ptr;
4274 tree *lhs_p, *rhs_p;
4277 if (!gimple_assign_single_p (stmt))
4280 rhs_p = gimple_assign_rhs1_ptr (stmt);
4281 lhs_p = gimple_assign_lhs_ptr (stmt);
4283 any = sra_ipa_modify_expr (rhs_p, false, adjustments);
4284 any |= sra_ipa_modify_expr (lhs_p, false, adjustments);
4287 tree new_rhs = NULL_TREE;
4289 if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
4291 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4293 /* V_C_Es of constructors can cause trouble (PR 42714). */
4294 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
4295 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
4297 *rhs_p = build_constructor (TREE_TYPE (*lhs_p), 0);
4300 new_rhs = fold_build1_loc (gimple_location (stmt),
4301 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
4304 else if (REFERENCE_CLASS_P (*rhs_p)
4305 && is_gimple_reg_type (TREE_TYPE (*lhs_p))
4306 && !is_gimple_reg (*lhs_p))
/* This can happen when an assignment between two single field
structures is turned into an assignment between two pointers to
4309 scalars (PR 42237). */
4314 tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
4315 true, GSI_SAME_STMT);
4317 gimple_assign_set_rhs_from_tree (gsi, tmp);
/* Traverse the function body and perform all modifications as described in
4327 ADJUSTMENTS. Return true iff the CFG has been changed. */
4330 ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
4332 bool cfg_changed = false;
4337 gimple_stmt_iterator gsi;
4339 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4340 replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);
4342 gsi = gsi_start_bb (bb);
4343 while (!gsi_end_p (gsi))
4345 gimple stmt = gsi_stmt (gsi);
4346 bool modified = false;
4350 switch (gimple_code (stmt))
4353 t = gimple_return_retval_ptr (stmt);
4354 if (*t != NULL_TREE)
4355 modified |= sra_ipa_modify_expr (t, true, adjustments);
4359 modified |= sra_ipa_modify_assign (&stmt, &gsi, adjustments);
4360 modified |= replace_removed_params_ssa_names (stmt, adjustments);
4364 /* Operands must be processed before the lhs. */
4365 for (i = 0; i < gimple_call_num_args (stmt); i++)
4367 t = gimple_call_arg_ptr (stmt, i);
4368 modified |= sra_ipa_modify_expr (t, true, adjustments);
4371 if (gimple_call_lhs (stmt))
4373 t = gimple_call_lhs_ptr (stmt);
4374 modified |= sra_ipa_modify_expr (t, false, adjustments);
4375 modified |= replace_removed_params_ssa_names (stmt,
4381 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
4383 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
4384 modified |= sra_ipa_modify_expr (t, true, adjustments);
4386 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
4388 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
4389 modified |= sra_ipa_modify_expr (t, false, adjustments);
4400 if (maybe_clean_eh_stmt (stmt)
4401 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4411 /* Call gimple_debug_bind_reset_value on all debug statements describing
4412 gimple register parameters that are being removed or replaced. */
4415 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
4418 gimple_stmt_iterator *gsip = NULL, gsi;
4420 if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR))
4422 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
4425 len = VEC_length (ipa_parm_adjustment_t, adjustments);
4426 for (i = 0; i < len; i++)
4428 struct ipa_parm_adjustment *adj;
4429 imm_use_iterator ui;
4430 gimple stmt, def_temp;
4431 tree name, vexpr, copy = NULL_TREE;
4432 use_operand_p use_p;
4434 adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
4435 if (adj->copy_param || !is_gimple_reg (adj->base))
4437 name = gimple_default_def (cfun, adj->base);
4440 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4442 /* All other users must have been removed by
4443 ipa_sra_modify_function_body. */
4444 gcc_assert (is_gimple_debug (stmt));
4445 if (vexpr == NULL && gsip != NULL)
4447 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4448 vexpr = make_node (DEBUG_EXPR_DECL);
4449 def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
4451 DECL_ARTIFICIAL (vexpr) = 1;
4452 TREE_TYPE (vexpr) = TREE_TYPE (name);
4453 DECL_MODE (vexpr) = DECL_MODE (adj->base);
4454 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4458 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
4459 SET_USE (use_p, vexpr);
4462 gimple_debug_bind_reset_value (stmt);
4465 /* Create a VAR_DECL for debug info purposes. */
4466 if (!DECL_IGNORED_P (adj->base))
4468 copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
4469 VAR_DECL, DECL_NAME (adj->base),
4470 TREE_TYPE (adj->base));
4471 if (DECL_PT_UID_SET_P (adj->base))
4472 SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
4473 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
4474 TREE_READONLY (copy) = TREE_READONLY (adj->base);
4475 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
4476 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
4477 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
4478 DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
4479 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
4480 DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
4481 SET_DECL_RTL (copy, 0);
4482 TREE_USED (copy) = 1;
4483 DECL_CONTEXT (copy) = current_function_decl;
4484 add_referenced_var (copy);
4485 add_local_decl (cfun, copy);
4487 DECL_CHAIN (copy) = BLOCK_VARS (DECL_INITIAL (current_function_decl));
4488 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
4490 if (gsip != NULL && copy && target_for_debug_bind (adj->base))
4492 gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4494 def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
4496 def_temp = gimple_build_debug_source_bind (copy, adj->base,
4498 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
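/* Illustrative sketch (hypothetical user code): when compiling with -g, a
   function such as

     static long twice (long *p) { return *p + *p; }

   loses the PARM_DECL P once the parameter is replaced.  Debug statements
   that still mention P are not thrown away: their uses are redirected to a
   DEBUG_EXPR_DECL bound to the incoming value (or reset when no such value
   is available), and an artificial VAR_DECL carrying P's name is pushed
   into the outermost block so the debugger can still print it.  */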
4503 /* Return false iff all callers have at least as many actual arguments as there
4504 are formal parameters in the current function. */
4507 not_all_callers_have_enough_arguments_p (struct cgraph_node *node,
4508 void *data ATTRIBUTE_UNUSED)
4510 struct cgraph_edge *cs;
4511 for (cs = node->callers; cs; cs = cs->next_caller)
4512 if (!callsite_has_enough_arguments_p (cs->call_stmt))
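/* For illustration (hypothetical user code), a call through an old-style
   declaration may pass fewer actual arguments than the callee has formal
   parameters:

     static int f ();
     int g (void) { return f (1); }
     static int f (int a, int b) { return a + b; }

   Removing or reordering F's parameters would then change which actual
   argument lands where, so such a caller makes NODE ineligible.  */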
4518 /* Convert all callers of NODE. */
4521 convert_callers_for_node (struct cgraph_node *node,
4524 ipa_parm_adjustment_vec adjustments = (ipa_parm_adjustment_vec)data;
4525 bitmap recomputed_callers = BITMAP_ALLOC (NULL);
4526 struct cgraph_edge *cs;
4528 for (cs = node->callers; cs; cs = cs->next_caller)
4530 current_function_decl = cs->caller->decl;
4531 push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));
4534 fprintf (dump_file, "Adjusting call (%i -> %i) %s -> %s\n",
4535 cs->caller->uid, cs->callee->uid,
4536 cgraph_node_name (cs->caller),
4537 cgraph_node_name (cs->callee));
4539 ipa_modify_call_arguments (cs, cs->call_stmt, adjustments);
4544 for (cs = node->callers; cs; cs = cs->next_caller)
4545 if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
4546 && gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
4547 compute_inline_parameters (cs->caller, true);
4548 BITMAP_FREE (recomputed_callers);
4553 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS. */
4556 convert_callers (struct cgraph_node *node, tree old_decl,
4557 ipa_parm_adjustment_vec adjustments)
4559 tree old_cur_fndecl = current_function_decl;
4560 basic_block this_block;
4562 cgraph_for_node_and_aliases (node, convert_callers_for_node,
4563 adjustments, false);
4565 current_function_decl = old_cur_fndecl;
4567 if (!encountered_recursive_call)
4570 FOR_EACH_BB (this_block)
4572 gimple_stmt_iterator gsi;
4574 for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
4576 gimple stmt = gsi_stmt (gsi);
4578 if (gimple_code (stmt) != GIMPLE_CALL)
4580 call_fndecl = gimple_call_fndecl (stmt);
4581 if (call_fndecl == old_decl)
4584 fprintf (dump_file, "Adjusting recursive call");
4585 gimple_call_set_fndecl (stmt, node->decl);
4586 ipa_modify_call_arguments (NULL, stmt, adjustments);
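/* Illustrative sketch (hypothetical user code) of the recursive fixup above:

     static int sum (struct S *p, int n)
     {
       int v = p->val;
       return n ? v + sum (p, n - 1) : v;
     }

   The body copied into the new clone still contains a call to the old decl
   SUM; assuming P is successfully reduced, that call is redirected to
   NODE->decl and its argument list rewritten by ipa_modify_call_arguments,
   so the clone recurses into itself with the adjusted parameters.  */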
4594 /* Perform all the modifications required in IPA-SRA for NODE to have parameters
4595 as given in ADJUSTMENTS. Return true iff the CFG has been changed. */
4598 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
4600 struct cgraph_node *new_node;
4602 VEC (cgraph_edge_p, heap) *redirect_callers = collect_callers_of_node (node);
4604 rebuild_cgraph_edges ();
4606 current_function_decl = NULL_TREE;
4608 new_node = cgraph_function_versioning (node, redirect_callers, NULL, NULL,
4609 NULL, NULL, "isra");
4610 current_function_decl = new_node->decl;
4611 push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
4613 ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
4614 cfg_changed = ipa_sra_modify_function_body (adjustments);
4615 sra_ipa_reset_debug_stmts (adjustments);
4616 convert_callers (new_node, node->decl, adjustments);
4617 cgraph_make_node_local (new_node);
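/* A sketch of the overall effect of the versioning above (hypothetical
   function, illustrative clone and parameter names):

     int foo (struct S *p);       original node, signature unchanged
     int foo.isra.0 (int val);    new local clone with reduced parameters

   cgraph_function_versioning creates the "isra" clone, the collected
   callers are redirected to it, its formal parameters and body are
   rewritten, and the clone is finally made local so that later passes can
   treat it as having no outside callers.  */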
4621 /* Return false if the function is apparently unsuitable for IPA-SRA based on its
4622 attributes, return true otherwise. NODE is the cgraph node of the current
4626 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
4628 if (!cgraph_node_can_be_local_p (node))
4631 fprintf (dump_file, "Function not local to this compilation unit.\n");
4635 if (!node->local.can_change_signature)
4638 fprintf (dump_file, "Function can not change signature.\n");
4642 if (!tree_versionable_function_p (node->decl))
4645 fprintf (dump_file, "Function is not versionable.\n");
4649 if (DECL_VIRTUAL_P (current_function_decl))
4652 fprintf (dump_file, "Function is a virtual method.\n");
4656 if ((DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
4657 && inline_summary(node)->size >= MAX_INLINE_INSNS_AUTO)
4660 fprintf (dump_file, "Function too big to be made truly local.\n");
4668 "Function has no callers in this compilation unit.\n");
4675 fprintf (dump_file, "Function uses stdarg. \n");
4679 if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
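/* Examples of functions the checks above reject (hypothetical user code;
   regparm is a target-specific type attribute used only as an example):

     int vsum (int n, ...);                             uses stdarg
     __attribute__ ((regparm (2))) int f (struct S *);  type attributes

   Virtual methods, functions that cannot be made local or versioned,
   COMDAT or external functions whose estimated size is at least
   MAX_INLINE_INSNS_AUTO, and functions without any callers in this
   compilation unit are likewise left alone.  */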
4685 /* Perform early interprocedural SRA. */
4688 ipa_early_sra (void)
4690 struct cgraph_node *node = cgraph_get_node (current_function_decl);
4691 ipa_parm_adjustment_vec adjustments;
4694 if (!ipa_sra_preliminary_function_checks (node))
4698 sra_mode = SRA_MODE_EARLY_IPA;
4700 if (!find_param_candidates ())
4703 fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
4707 if (cgraph_for_node_and_aliases (node, not_all_callers_have_enough_arguments_p,
4711 fprintf (dump_file, "There are callers with insufficient number of "
4716 bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
4718 * last_basic_block_for_function (cfun));
4719 final_bbs = BITMAP_ALLOC (NULL);
4722 if (encountered_apply_args)
4725 fprintf (dump_file, "Function calls __builtin_apply_args().\n");
4729 if (encountered_unchangable_recursive_call)
4732 fprintf (dump_file, "Function calls itself with insufficient "
4733 "number of arguments.\n");
4737 adjustments = analyze_all_param_acesses ();
4741 ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
4743 if (modify_function (node, adjustments))
4744 ret = TODO_update_ssa | TODO_cleanup_cfg;
4746 ret = TODO_update_ssa;
4747 VEC_free (ipa_parm_adjustment_t, heap, adjustments);
4749 statistics_counter_event (cfun, "Unused parameters deleted",
4750 sra_stats.deleted_unused_parameters);
4751 statistics_counter_event (cfun, "Scalar parameters converted to by-value",
4752 sra_stats.scalar_by_ref_to_by_val);
4753 statistics_counter_event (cfun, "Aggregate parameters broken up",
4754 sra_stats.aggregate_params_reduced);
4755 statistics_counter_event (cfun, "Aggregate parameter components created",
4756 sra_stats.param_reductions_created);
4759 BITMAP_FREE (final_bbs);
4760 free (bb_dereferences);
4762 sra_deinitialize ();
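/* An end-to-end sketch of what a successful run of this function achieves
   (hypothetical user code, illustrative clone name):

     struct S { int a, b; };
     static int sum (struct S *p) { return p->a + p->b; }
     int caller (struct S *s) { return sum (s); }

   may become, conceptually,

     static int sum.isra.0 (int a, int b) { return a + b; }
     int caller (struct S *s) { return sum.isra.0 (s->a, s->b); }

   i.e. the aggregate parameter is broken up into the scalar pieces that are
   actually used and every call site loads those pieces itself; whether this
   happens depends on the candidate and access analyses invoked above.  */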
4766 /* Return true if early IPA-SRA should be performed. */
4768 ipa_early_sra_gate (void)
4770 return flag_ipa_sra && dbg_cnt (eipa_sra);
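/* The gate is controlled by -fipa-sra (on by default at higher optimization
   levels) and by the eipa_sra debug counter; for instance, an invocation
   along the lines of

     gcc -O2 -fdbg-cnt=eipa_sra:7 file.c

   lets dbg_cnt succeed only for the first few gated functions, which helps
   bisect problems introduced by this pass.  */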
4773 struct gimple_opt_pass pass_early_ipa_sra =
4777 "eipa_sra", /* name */
4778 ipa_early_sra_gate, /* gate */
4779 ipa_early_sra, /* execute */
4782 0, /* static_pass_number */
4783 TV_IPA_SRA, /* tv_id */
4784 0, /* properties_required */
4785 0, /* properties_provided */
4786 0, /* properties_destroyed */
4787 0, /* todo_flags_start */
4788 TODO_dump_cgraph /* todo_flags_finish */