/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
   Contributed by Martin Jambor <mjambor@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.

   The two passes are nearly identical, the only difference is that early SRA
   does not scalarize unions which are used as the result in a GIMPLE_RETURN
   statement because together with inlining this can lead to weird type
   conversions.

   Both passes operate in four stages:

   1. The declarations that have properties which make them candidates for
      scalarization are identified in function find_var_candidates().  The
      candidates are stored in candidate_bitmap.

   2. The function body is scanned.  In the process, declarations which are
      used in a manner that prevents their scalarization are removed from the
      candidate bitmap.  More importantly, for every access into an aggregate,
      an access structure (struct access) is created by create_access() and
      stored in a vector associated with the aggregate.  Among other
      information, the aggregate declaration, the offset and size of the access
      and its type are stored in the structure.

      On a related note, assign_link structures are created for every assign
      statement between candidate aggregates and attached to the related
      accesses.

   3. The vectors of accesses are analyzed.  They are first sorted according to
      their offset and size and then scanned for partially overlapping accesses
      (i.e. those which overlap but one is not entirely within another).  Such
      an access disqualifies the whole aggregate from being scalarized.

      If there is no such inhibiting overlap, a representative access structure
      is chosen for every unique combination of offset and size.  Afterwards,
      the pass builds a set of trees from these structures, in which children
      of an access are within their parent (in terms of offset and size).

      Then accesses are propagated whenever possible (i.e. in cases when it
      does not create a partially overlapping access) across assign_links from
      the right hand side to the left hand side.

      Then the set of trees for each declaration is traversed again and those
      accesses which should be replaced by a scalar are identified.

   4. The function is traversed again, and for every reference into an
      aggregate that has some component which is about to be scalarized,
      statements are amended and new statements are created as necessary.
      Finally, if a parameter got scalarized, the scalar replacements are
      initialized with values from respective parameter aggregates.  */
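
/* An illustrative sketch of the whole pipeline (not from the testsuite; the
   replacement names and SSA numbering below are hypothetical, following the
   make_fancy_name "$" convention used later in this file).  Given

     struct pair { int x; int y; };

     int
     f (void)
     {
       struct pair p;
       p.x = 1;
       p.y = 2;
       return p.x + p.y;
     }

   stage 2 records the accesses <p, 0, 32> and <p, 32, 32> (offsets and sizes
   in bits, assuming 32-bit int), stage 3 marks both to be replaced, and
   stage 4 rewrites the body roughly to

     p$x_1 = 1;
     p$y_2 = 2;
     return p$x_1 + p$y_2;

   after which the aggregate p is dead and its frame slot can be removed.  */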
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "alloc-pool.h"
#include "tm.h"
#include "tree.h"
#include "gimple.h"
#include "cgraph.h"
#include "tree-flow.h"
#include "ipa-prop.h"
#include "tree-pretty-print.h"
#include "statistics.h"
#include "tree-dump.h"
#include "timevar.h"
#include "params.h"
#include "target.h"
#include "flags.h"
#include "dbgcnt.h"
#include "tree-inline.h"
#include "gimple-pretty-print.h"
#include "ipa-inline.h"
/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
		SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
		SRA_MODE_INTRA };     /* late intraprocedural SRA */

/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;

struct assign_link;
/* ACCESS represents each access to an aggregate variable (as a whole or a
   part).  It can also represent a group of accesses that refer to exactly the
   same fragment of an aggregate (i.e. those that have exactly the same offset
   and size).  Such representatives for a single aggregate, once determined,
   are linked in a linked list and have the group fields set.

   Moreover, when doing intraprocedural SRA, a tree is built from those
   representatives (by the means of first_child and next_sibling pointers), in
   which all items in a subtree are "within" the root, i.e. their offset is
   greater or equal to the offset of the root and offset+size is smaller or
   equal to offset+size of the root.  Children of an access are sorted by
   offset.

   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole complex number or vector.  It is the
   duty of the modifying functions to replace them appropriately.  */
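
/* For instance (a sketch, bit offsets assuming 32-bit int): for

     struct s { struct { int a; int b; } inner; int c; } x;

   with accesses to x.inner, x.inner.a, x.inner.b and x.c, the representatives
   would form

     x.inner <0, 64>                x.c <64, 32>
       +- x.inner.a <0, 32>
       +- x.inner.b <32, 32>

   where x.inner and x.c are connected through next_grp and the children of
   x.inner through first_child/next_sibling.  */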
struct access
{
  /* Values returned by `get_ref_base_and_extent' for each component reference.
     If EXPR isn't a component reference just set `BASE = EXPR', `OFFSET = 0',
     `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
  tree base;

  /* Expression.  It is context dependent so do not use it to create new
     expressions to access the original aggregate.  See PR 42154 for a
     testcase.  */
  tree expr;
  /* Type.  */
  tree type;

  /* The statement this access belongs to.  */
  gimple stmt;

  /* Next group representative for this aggregate.  */
  struct access *next_grp;

  /* Pointer to the group representative.  Pointer to itself if the struct is
     the representative.  */
  struct access *group_representative;

  /* If this access has any children (in terms of the definition above), this
     points to the first one.  */
  struct access *first_child;

  /* In intraprocedural SRA, pointer to the next sibling in the access tree as
     described above.  In IPA-SRA this is a pointer to the next access
     belonging to the same group (having the same representative).  */
  struct access *next_sibling;

  /* Pointers to the first and last element in the linked list of assign
     links.  */
  struct assign_link *first_link, *last_link;

  /* Pointer to the next access in the work queue.  */
  struct access *next_queued;

  /* Replacement variable for this access "region."  Never to be accessed
     directly, always only by the means of get_access_replacement() and only
     when grp_to_be_replaced flag is set.  */
  tree replacement_decl;
  /* Is this particular access a write access?  */
  unsigned write : 1;

  /* Is this access an access to a non-addressable field?  */
  unsigned non_addressable : 1;

  /* Is this access currently in the work queue?  */
  unsigned grp_queued : 1;

  /* Does this group contain a write access?  This flag is propagated down the
     access tree.  */
  unsigned grp_write : 1;

  /* Does this group contain a read access?  This flag is propagated down the
     access tree.  */
  unsigned grp_read : 1;

  /* Does this group contain a read access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_read : 1;

  /* Does this group contain a write access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_write : 1;

  /* Does this group contain a read access through a scalar type?  This flag is
     not propagated in the access tree in any direction.  */
  unsigned grp_scalar_read : 1;

  /* Does this group contain a write access through a scalar type?  This flag
     is not propagated in the access tree in any direction.  */
  unsigned grp_scalar_write : 1;

  /* Is this access an artificial one created to scalarize some record
     entirely?  */
  unsigned grp_total_scalarization : 1;

  /* Other passes of the analysis use this bit to make function
     analyze_access_subtree create scalar replacements for this group if
     possible.  */
  unsigned grp_hint : 1;

  /* Is the subtree rooted in this access fully covered by scalar
     replacements?  */
  unsigned grp_covered : 1;

  /* If set to true, this access and all below it in an access tree must not be
     scalarized.  */
  unsigned grp_unscalarizable_region : 1;

  /* Whether data have been written to parts of the aggregate covered by this
     access which is not to be scalarized.  This flag is propagated up in the
     access tree.  */
  unsigned grp_unscalarized_data : 1;

  /* Does this access and/or group contain a write access through a
     BIT_FIELD_REF?  */
  unsigned grp_partial_lhs : 1;

  /* Set when a scalar replacement should be created for this variable.  We do
     the decision and creation at different places because create_tmp_var
     cannot be called from within FOR_EACH_REFERENCED_VAR.  */
  unsigned grp_to_be_replaced : 1;

  /* Should TREE_NO_WARNING of a replacement be set?  */
  unsigned grp_no_warning : 1;

  /* Is it possible that the group refers to data which might be (directly or
     otherwise) modified?  */
  unsigned grp_maybe_modified : 1;

  /* Set when this is a representative of a pointer to scalar (i.e. by
     reference) parameter which we consider for turning into a plain scalar
     (i.e. a by value parameter).  */
  unsigned grp_scalar_ptr : 1;

  /* Set when we discover that this pointer is not safe to dereference in the
     caller.  */
  unsigned grp_not_necessarilly_dereferenced : 1;
};
typedef struct access *access_p;

DEF_VEC_P (access_p);
DEF_VEC_ALLOC_P (access_p, heap);

/* Alloc pool for allocating access structures.  */
static alloc_pool access_pool;
/* A structure linking lhs and rhs accesses from an aggregate assignment.  They
   are used to propagate subaccesses from rhs to lhs as long as they don't
   conflict with what is already there.  */
struct assign_link
{
  struct access *lacc, *racc;
  struct assign_link *next;
};

/* Alloc pool for allocating assign link structures.  */
static alloc_pool link_pool;
/* Base (tree) -> Vector (VEC(access_p,heap) *) map.  */
static struct pointer_map_t *base_access_vec;

/* Bitmap of candidates.  */
static bitmap candidate_bitmap;

/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are and need be used as a whole).  */
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;

/* Obstack for creation of fancy names.  */
static struct obstack name_obstack;

/* Head of a linked list of accesses that need to have their subaccesses
   propagated to their assignment counterparts.  */
static struct access *work_queue_head;

/* Number of parameters of the analyzed function when doing early ipa SRA.  */
static int func_param_count;
/* scan_function sets the following to true if it encounters a call to
   __builtin_apply_args.  */
static bool encountered_apply_args;

/* Set by scan_function when it finds a recursive call.  */
static bool encountered_recursive_call;

/* Set by scan_function when it finds a recursive call with fewer actual
   arguments than formal parameters.  */
static bool encountered_unchangable_recursive_call;

/* This is a table in which for each basic block and parameter there is a
   distance (offset + size) in that parameter which is dereferenced and
   accessed in that BB.  */
static HOST_WIDE_INT *bb_dereferences;
/* Bitmap of BBs that can cause the function to "stop" progressing by
   returning, throwing externally, looping infinitely or calling a function
   which might abort etc.  */
static bitmap final_bbs;
/* Representative of no accesses at all.  */
static struct access no_accesses_representant;

/* Predicate to test the special value.  */

static inline bool
no_accesses_p (struct access *access)
{
  return access == &no_accesses_representant;
}
static struct
{
  /* Number of processed aggregates is readily available in
     analyze_all_variable_accesses and so is not stored here.  */

  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
     expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of their
     components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;
} sra_stats;

/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */

static void
dump_access (FILE *f, struct access *access, bool grp)
{
  fprintf (f, "access { ");
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
  print_generic_expr (f, access->base, 0);
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
  fprintf (f, ", expr = ");
  print_generic_expr (f, access->expr, 0);
  fprintf (f, ", type = ");
  print_generic_expr (f, access->type, 0);
  if (grp)
    fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
	     "grp_assignment_write = %d, grp_scalar_read = %d, "
	     "grp_scalar_write = %d, grp_total_scalarization = %d, "
	     "grp_hint = %d, grp_covered = %d, "
	     "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
	     "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
	     "grp_maybe_modified = %d, "
	     "grp_not_necessarilly_dereferenced = %d\n",
	     access->grp_read, access->grp_write, access->grp_assignment_read,
	     access->grp_assignment_write, access->grp_scalar_read,
	     access->grp_scalar_write, access->grp_total_scalarization,
	     access->grp_hint, access->grp_covered,
	     access->grp_unscalarizable_region, access->grp_unscalarized_data,
	     access->grp_partial_lhs, access->grp_to_be_replaced,
	     access->grp_maybe_modified,
	     access->grp_not_necessarilly_dereferenced);
  else
    fprintf (f, ", write = %d, grp_total_scalarization = %d, "
	     "grp_partial_lhs = %d\n",
	     access->write, access->grp_total_scalarization,
	     access->grp_partial_lhs);
}
/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */

static void
dump_access_tree_1 (FILE *f, struct access *access, int level)
{
  do
    {
      int i;

      for (i = 0; i < level; i++)
	fputs ("* ", f);

      dump_access (f, access, true);

      if (access->first_child)
	dump_access_tree_1 (f, access->first_child, level + 1);

      access = access->next_sibling;
    }
  while (access);
}

/* Dump all access trees for a variable, given the pointer to the first root in
   ACCESS.  */

static void
dump_access_tree (FILE *f, struct access *access)
{
  for (; access; access = access->next_grp)
    dump_access_tree_1 (f, access, 0);
}
/* Return true iff ACC is non-NULL and has subaccesses.  */

static inline bool
access_has_children_p (struct access *acc)
{
  return acc && acc->first_child;
}

/* Return a vector of pointers to accesses for the variable given in BASE or
   NULL if there is none.  */

static VEC (access_p, heap) *
get_base_access_vector (tree base)
{
  void **slot;

  slot = pointer_map_contains (base_access_vec, base);
  if (!slot)
    return NULL;

  return *(VEC (access_p, heap) **) slot;
}
/* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
   in ACCESS.  Return NULL if it cannot be found.  */

static struct access *
find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
			HOST_WIDE_INT size)
{
  while (access && (access->offset != offset || access->size != size))
    {
      struct access *child = access->first_child;

      while (child && (child->offset + child->size <= offset))
	child = child->next_sibling;
      access = child;
    }

  return access;
}
/* Return the first group representative for DECL or NULL if none exists.  */

static struct access *
get_first_repr_for_decl (tree base)
{
  VEC (access_p, heap) *access_vec;

  access_vec = get_base_access_vector (base);
  if (!access_vec)
    return NULL;

  return VEC_index (access_p, access_vec, 0);
}
/* Find an access representative for the variable BASE and given OFFSET and
   SIZE.  Requires that access trees have already been built.  Return NULL if
   it cannot be found.  */

static struct access *
get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
				 HOST_WIDE_INT size)
{
  struct access *access;

  access = get_first_repr_for_decl (base);
  while (access && (access->offset + access->size <= offset))
    access = access->next_grp;
  if (!access)
    return NULL;

  return find_access_in_subtree (access, offset, size);
}
/* Add LINK to the linked list of assign links of RACC.  */
static void
add_link_to_rhs (struct access *racc, struct assign_link *link)
{
  gcc_assert (link->racc == racc);

  if (!racc->first_link)
    {
      gcc_assert (!racc->last_link);
      racc->first_link = link;
    }
  else
    racc->last_link->next = link;

  racc->last_link = link;
  link->next = NULL;
}
/* Move all link structures in their linked list in OLD_RACC to the linked list
   in NEW_RACC.  */
static void
relink_to_new_repr (struct access *new_racc, struct access *old_racc)
{
  if (!old_racc->first_link)
    {
      gcc_assert (!old_racc->last_link);
      return;
    }

  if (new_racc->first_link)
    {
      gcc_assert (!new_racc->last_link->next);
      gcc_assert (!old_racc->last_link || !old_racc->last_link->next);

      new_racc->last_link->next = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  else
    {
      gcc_assert (!new_racc->last_link);

      new_racc->first_link = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  old_racc->first_link = old_racc->last_link = NULL;
}
/* Add ACCESS to the work queue (which is actually a stack).  */

static void
add_access_to_work_queue (struct access *access)
{
  if (!access->grp_queued)
    {
      gcc_assert (!access->next_queued);
      access->next_queued = work_queue_head;
      access->grp_queued = 1;
      work_queue_head = access;
    }
}

/* Pop an access from the work queue, and return it, assuming there is one.  */

static struct access *
pop_access_from_work_queue (void)
{
  struct access *access = work_queue_head;

  work_queue_head = access->next_queued;
  access->next_queued = NULL;
  access->grp_queued = 0;
  return access;
}
/* Allocate necessary structures.  */

static void
sra_initialize (void)
{
  candidate_bitmap = BITMAP_ALLOC (NULL);
  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  gcc_obstack_init (&name_obstack);
  access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
  link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
  base_access_vec = pointer_map_create ();
  memset (&sra_stats, 0, sizeof (sra_stats));
  encountered_apply_args = false;
  encountered_recursive_call = false;
  encountered_unchangable_recursive_call = false;
}
/* Hook fed to pointer_map_traverse, deallocate stored vectors.  */

static bool
delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
		      void *data ATTRIBUTE_UNUSED)
{
  VEC (access_p, heap) *access_vec;
  access_vec = (VEC (access_p, heap) *) *value;
  VEC_free (access_p, heap, access_vec);

  return true;
}

/* Deallocate all general structures.  */

static void
sra_deinitialize (void)
{
  BITMAP_FREE (candidate_bitmap);
  BITMAP_FREE (should_scalarize_away_bitmap);
  BITMAP_FREE (cannot_scalarize_away_bitmap);
  free_alloc_pool (access_pool);
  free_alloc_pool (link_pool);
  obstack_free (&name_obstack, NULL);

  pointer_map_traverse (base_access_vec, delete_base_accesses, NULL);
  pointer_map_destroy (base_access_vec);
}
/* Remove DECL from candidates for SRA and write REASON to the dump file if
   there is one.  */
static void
disqualify_candidate (tree decl, const char *reason)
{
  bitmap_clear_bit (candidate_bitmap, DECL_UID (decl));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "! Disqualifying ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, " - %s\n", reason);
    }
}
/* Return true iff the type contains a field or an element which does not allow
   scalarization.  */

static bool
type_internals_preclude_sra_p (tree type, const char **msg)
{
  tree fld;
  tree et;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	if (TREE_CODE (fld) == FIELD_DECL)
	  {
	    tree ft = TREE_TYPE (fld);

	    if (TREE_THIS_VOLATILE (fld))
	      {
		*msg = "volatile structure field";
		return true;
	      }
	    if (!DECL_FIELD_OFFSET (fld))
	      {
		*msg = "no structure field offset";
		return true;
	      }
	    if (!DECL_SIZE (fld))
	      {
		*msg = "zero structure field size";
		return true;
	      }
	    if (!host_integerp (DECL_FIELD_OFFSET (fld), 1))
	      {
		*msg = "structure field offset not fixed";
		return true;
	      }
	    if (!host_integerp (DECL_SIZE (fld), 1))
	      {
		*msg = "structure field size not fixed";
		return true;
	      }
	    if (AGGREGATE_TYPE_P (ft)
		&& int_bit_position (fld) % BITS_PER_UNIT != 0)
	      {
		*msg = "structure field is bit field";
		return true;
	      }

	    if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
	      return true;
	  }

      return false;

    case ARRAY_TYPE:
      et = TREE_TYPE (type);

      if (TYPE_VOLATILE (et))
	{
	  *msg = "element type is volatile";
	  return true;
	}

      if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
	return true;

      return false;

    default:
      return false;
    }
}
/* If T is an SSA_NAME, return NULL if it is not a default def or return its
   base variable if it is.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (t))
	return SSA_NAME_VAR (t);
      else
	return NULL_TREE;
    }
  return t;
}
/* Mark a dereference of BASE of distance DIST in a basic block that STMT
   belongs to, unless the BB has already been marked as a potentially
   final one.  */

static void
mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  int idx, parm_index = 0;
  tree parm;

  if (bitmap_bit_p (final_bbs, bb->index))
    return;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm && parm != base;
       parm = DECL_CHAIN (parm))
    parm_index++;

  gcc_assert (parm_index < func_param_count);

  idx = bb->index * func_param_count + parm_index;
  if (bb_dereferences[idx] < dist)
    bb_dereferences[idx] = dist;
}
/* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
   the three fields.  Also add it to the vector of accesses corresponding to
   the base.  Finally, return the new access.  */

static struct access *
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
  VEC (access_p, heap) *vec;
  struct access *access;
  void **slot;

  access = (struct access *) pool_alloc (access_pool);
  memset (access, 0, sizeof (struct access));
  access->base = base;
  access->offset = offset;
  access->size = size;

  slot = pointer_map_contains (base_access_vec, base);
  if (slot)
    vec = (VEC (access_p, heap) *) *slot;
  else
    vec = VEC_alloc (access_p, heap, 32);

  VEC_safe_push (access_p, heap, vec, access);

  *((struct VEC (access_p,heap) **)
    pointer_map_insert (base_access_vec, base)) = vec;

  return access;
}
/* Create and insert access for EXPR.  Return created access, or NULL if it is
   not possible.  */

static struct access *
create_access (tree expr, gimple stmt, bool write)
{
  struct access *access;
  HOST_WIDE_INT offset, size, max_size;
  tree base = expr;
  bool ptr, unscalarizable_region = false;

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);

  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (base) == MEM_REF)
    {
      base = get_ssa_base_param (TREE_OPERAND (base, 0));
      if (!base)
	return NULL;
      ptr = true;
    }
  else
    ptr = false;

  if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (size < 0 || size != max_size)
	{
	  disqualify_candidate (base, "Encountered a variable sized access.");
	  return NULL;
	}
      if (TREE_CODE (expr) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
	{
	  disqualify_candidate (base, "Encountered a bit-field access.");
	  return NULL;
	}
      gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);

      if (ptr)
	mark_parm_dereference (base, offset + size, stmt);
    }
  else
    {
      if (size != max_size)
	{
	  size = max_size;
	  unscalarizable_region = true;
	}
      if (size < 0)
	{
	  disqualify_candidate (base, "Encountered an unconstrained access.");
	  return NULL;
	}
    }

  access = create_access_1 (base, offset, size);
  access->expr = expr;
  access->type = TREE_TYPE (expr);
  access->write = write;
  access->grp_unscalarizable_region = unscalarizable_region;
  access->stmt = stmt;

  if (TREE_CODE (expr) == COMPONENT_REF
      && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
    access->non_addressable = 1;

  return access;
}
/* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
   register types or (recursively) records with only these two kinds of fields.
   It also returns false if any of these records contains a bit-field.  */

static bool
type_consists_of_records_p (tree type)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
	tree ft = TREE_TYPE (fld);

	if (DECL_BIT_FIELD (fld))
	  return false;

	if (!is_gimple_reg_type (ft)
	    && !type_consists_of_records_p (ft))
	  return false;
      }

  return true;
}
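
/* As an example (a sketch, not from the testsuite), the predicate above
   accepts

     struct ok { int i; struct { float f; } nested; };

   but rejects

     struct bad1 { int a[4]; };       array member is not a record
     struct bad2 { int bits : 3; };   bit-field
     union bad3 { int i; float f; };  not a RECORD_TYPE

   which restricts total scalarization to plain nests of scalar fields.  */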
/* Create total_scalarization accesses for all scalar type fields in DECL that
   must be of a RECORD_TYPE conforming to type_consists_of_records_p.  BASE
   must be the top-most VAR_DECL representing the variable, OFFSET must be the
   offset of DECL within BASE.  REF must be the memory reference expression for
   the given decl.  */

static void
completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
			     tree ref)
{
  tree fld, decl_type = TREE_TYPE (decl);

  for (fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
	HOST_WIDE_INT pos = offset + int_bit_position (fld);
	tree ft = TREE_TYPE (fld);
	tree nref = build3 (COMPONENT_REF, TREE_TYPE (fld), ref, fld,
			    NULL_TREE);

	if (is_gimple_reg_type (ft))
	  {
	    struct access *access;
	    HOST_WIDE_INT size;

	    size = tree_low_cst (DECL_SIZE (fld), 1);
	    access = create_access_1 (base, pos, size);
	    access->expr = nref;
	    access->type = ft;
	    access->grp_total_scalarization = 1;
	    /* Accesses for intraprocedural SRA can have their stmt NULL.  */
	  }
	else
	  completely_scalarize_record (base, fld, pos, nref);
      }
}
/* Create total_scalarization accesses for all scalar type fields in VAR and
   for VAR as a whole.  VAR must be of a RECORD_TYPE conforming to
   type_consists_of_records_p.  */

static void
completely_scalarize_var (tree var)
{
  HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (var), 1);
  struct access *access;

  access = create_access_1 (var, 0, size);
  access->expr = var;
  access->type = TREE_TYPE (var);
  access->grp_total_scalarization = 1;

  completely_scalarize_record (var, var, 0, var);
}
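
/* E.g. (a sketch continuing the example above): for

     struct ok { int i; struct { float f; } nested; } v;

   completely_scalarize_var creates the access <v, 0, 64> for v as a whole
   plus the scalar accesses <v, 0, 32> for v.i and <v, 32, 32> for v.nested.f
   (bit offsets assuming 32-bit int and float), all of them with
   grp_total_scalarization set.  */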
/* Search the given tree for a declaration by skipping handled components and
   exclude it from the candidates.  */

static void
disqualify_base_of_expr (tree t, const char *reason)
{
  t = get_base_address (t);
  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (t) == MEM_REF)
    t = get_ssa_base_param (TREE_OPERAND (t, 0));

  if (t && DECL_P (t))
    disqualify_candidate (t, reason);
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return the created access or NULL if none is
   created.  */

static struct access *
build_access_from_expr_1 (tree expr, gimple stmt, bool write)
{
  struct access *ret = NULL;
  bool partial_ref;

  if (TREE_CODE (expr) == BIT_FIELD_REF
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == REALPART_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      partial_ref = true;
    }
  else
    partial_ref = false;

  /* We need to dive through V_C_Es in order to get the size of its parameter
     and not the result type.  Ada produces such statements.  We are also
     capable of handling the topmost V_C_E but not any of those buried in other
     handled components.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  if (contains_view_convert_expr_p (expr))
    {
      disqualify_base_of_expr (expr, "V_C_E under a different handled "
			       "component.");
      return NULL;
    }

  switch (TREE_CODE (expr))
    {
    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
	  && sra_mode != SRA_MODE_EARLY_IPA)
	return NULL;
      /* FALLTHRU */
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      ret = create_access (expr, stmt, write);
      break;

    default:
      break;
    }

  if (write && partial_ref && ret)
    ret->grp_partial_lhs = 1;

  return ret;
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return true if any access has been inserted.
   STMT must be the statement from which the expression is taken, WRITE must be
   true if the expression is a store and false otherwise.  */

static bool
build_access_from_expr (tree expr, gimple stmt, bool write)
{
  struct access *access;

  access = build_access_from_expr_1 (expr, stmt, write);
  if (access)
    {
      /* This means the aggregate is accessed as a whole in a way other than an
	 assign statement and thus cannot be removed even if we had a scalar
	 replacement for everything.  */
      if (cannot_scalarize_away_bitmap)
	bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
      return true;
    }
  return false;
}
/* Disqualify LHS and RHS for scalarization if STMT must end its basic block in
   modes in which it matters, return true iff they have been disqualified.  RHS
   may be NULL, in that case ignore it.  If we scalarize an aggregate in
   intra-SRA we may need to add statements after each statement.  This is not
   possible if a statement unconditionally has to end the basic block.  */
static bool
disqualify_ops_if_throwing_stmt (gimple stmt, tree lhs, tree rhs)
{
  if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && (stmt_can_throw_internal (stmt) || stmt_ends_bb_p (stmt)))
    {
      disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
      if (rhs)
	disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
      return true;
    }
  return false;
}
/* Return true if EXP is a memory reference less aligned than ALIGN.  This is
   invoked only on strict-alignment targets.  */

static bool
tree_non_aligned_mem_p (tree exp, unsigned int align)
{
  unsigned int exp_align;

  if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    exp = TREE_OPERAND (exp, 0);

  if (TREE_CODE (exp) == SSA_NAME || is_gimple_min_invariant (exp))
    return false;

  /* get_object_alignment will fall back to BITS_PER_UNIT if it cannot
     compute an explicit alignment.  Pretend that dereferenced pointers
     are always aligned on strict-alignment targets.  */
  if (TREE_CODE (exp) == MEM_REF || TREE_CODE (exp) == TARGET_MEM_REF)
    exp_align = get_object_or_type_alignment (exp);
  else
    exp_align = get_object_alignment (exp);

  if (exp_align < align)
    return true;

  return false;
}
/* Scan expressions occurring in STMT, create access structures for all
   accesses to candidates for scalarization and remove those candidates which
   occur in statements or expressions that prevent them from being split apart.
   Return true if any access has been inserted.  */

static bool
build_accesses_from_assign (gimple stmt)
{
  tree lhs, rhs;
  struct access *lacc, *racc;

  if (!gimple_assign_single_p (stmt)
      /* Scope clobbers don't influence scalarization.  */
      || gimple_clobber_p (stmt))
    return false;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  if (disqualify_ops_if_throwing_stmt (stmt, lhs, rhs))
    return false;

  racc = build_access_from_expr_1 (rhs, stmt, false);
  lacc = build_access_from_expr_1 (lhs, stmt, true);

  if (lacc)
    {
      lacc->grp_assignment_write = 1;
      if (STRICT_ALIGNMENT
	  && tree_non_aligned_mem_p (rhs, get_object_alignment (lhs)))
	lacc->grp_unscalarizable_region = 1;
    }

  if (racc)
    {
      racc->grp_assignment_read = 1;
      if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
	  && !is_gimple_reg_type (racc->type))
	bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
      if (STRICT_ALIGNMENT
	  && tree_non_aligned_mem_p (lhs, get_object_alignment (rhs)))
	racc->grp_unscalarizable_region = 1;
    }

  if (lacc && racc
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && !lacc->grp_unscalarizable_region
      && !racc->grp_unscalarizable_region
      && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
      /* FIXME: Turn the following line into an assert after PR 40058 is
	 fixed.  */
      && lacc->size == racc->size
      && useless_type_conversion_p (lacc->type, racc->type))
    {
      struct assign_link *link;

      link = (struct assign_link *) pool_alloc (link_pool);
      memset (link, 0, sizeof (struct assign_link));

      link->lacc = lacc;
      link->racc = racc;

      add_link_to_rhs (racc, link);
    }

  return lacc || racc;
}
/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */

static bool
asm_visit_addr (gimple stmt ATTRIBUTE_UNUSED, tree op,
		void *data ATTRIBUTE_UNUSED)
{
  op = get_base_address (op);
  if (op
      && DECL_P (op))
    disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");

  return false;
}
/* Return true iff callsite CALL has at least as many actual arguments as there
   are formal parameters of the function currently processed by IPA-SRA.  */

static bool
callsite_has_enough_arguments_p (gimple call)
{
  return gimple_call_num_args (call) >= (unsigned) func_param_count;
}
/* Scan function and look for interesting expressions and create access
   structures for them.  Return true iff any access is created.  */

static bool
scan_function (void)
{
  basic_block bb;
  bool ret = false;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree t;
	  unsigned i;

	  if (final_bbs && stmt_can_throw_external (stmt))
	    bitmap_set_bit (final_bbs, bb->index);
	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      t = gimple_return_retval (stmt);
	      if (t != NULL_TREE)
		ret |= build_access_from_expr (t, stmt, false);
	      if (final_bbs)
		bitmap_set_bit (final_bbs, bb->index);
	      break;

	    case GIMPLE_ASSIGN:
	      ret |= build_accesses_from_assign (stmt);
	      break;

	    case GIMPLE_CALL:
	      for (i = 0; i < gimple_call_num_args (stmt); i++)
		ret |= build_access_from_expr (gimple_call_arg (stmt, i),
					       stmt, false);

	      if (sra_mode == SRA_MODE_EARLY_IPA)
		{
		  tree dest = gimple_call_fndecl (stmt);
		  int flags = gimple_call_flags (stmt);

		  if (dest)
		    {
		      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
			  && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
			encountered_apply_args = true;
		      if (cgraph_get_node (dest)
			  == cgraph_get_node (current_function_decl))
			{
			  encountered_recursive_call = true;
			  if (!callsite_has_enough_arguments_p (stmt))
			    encountered_unchangable_recursive_call = true;
			}
		    }

		  if (final_bbs
		      && (flags & (ECF_CONST | ECF_PURE)) == 0)
		    bitmap_set_bit (final_bbs, bb->index);
		}

	      t = gimple_call_lhs (stmt);
	      if (t && !disqualify_ops_if_throwing_stmt (stmt, t, NULL))
		ret |= build_access_from_expr (t, stmt, true);
	      break;

	    case GIMPLE_ASM:
	      walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
					     asm_visit_addr);
	      if (final_bbs)
		bitmap_set_bit (final_bbs, bb->index);

	      for (i = 0; i < gimple_asm_ninputs (stmt); i++)
		{
		  t = TREE_VALUE (gimple_asm_input_op (stmt, i));
		  ret |= build_access_from_expr (t, stmt, false);
		}
	      for (i = 0; i < gimple_asm_noutputs (stmt); i++)
		{
		  t = TREE_VALUE (gimple_asm_output_op (stmt, i));
		  ret |= build_access_from_expr (t, stmt, true);
		}
	      break;

	    default:
	      break;
	    }
	}
    }

  return ret;
}
/* Helper of QSORT function.  There are pointers to accesses in the array.  An
   access is considered smaller than another if it has smaller offset or if the
   offsets are the same but its size is bigger.  */

static int
compare_access_positions (const void *a, const void *b)
{
  const access_p *fp1 = (const access_p *) a;
  const access_p *fp2 = (const access_p *) b;
  const access_p f1 = *fp1;
  const access_p f2 = *fp2;

  if (f1->offset != f2->offset)
    return f1->offset < f2->offset ? -1 : 1;

  if (f1->size == f2->size)
    {
      if (f1->type == f2->type)
	return 0;
      /* Put any non-aggregate type before any aggregate type.  */
      else if (!is_gimple_reg_type (f1->type)
	       && is_gimple_reg_type (f2->type))
	return 1;
      else if (is_gimple_reg_type (f1->type)
	       && !is_gimple_reg_type (f2->type))
	return -1;
      /* Put any complex or vector type before any other scalar type.  */
      else if (TREE_CODE (f1->type) != COMPLEX_TYPE
	       && TREE_CODE (f1->type) != VECTOR_TYPE
	       && (TREE_CODE (f2->type) == COMPLEX_TYPE
		   || TREE_CODE (f2->type) == VECTOR_TYPE))
	return 1;
      else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
		|| TREE_CODE (f1->type) == VECTOR_TYPE)
	       && TREE_CODE (f2->type) != COMPLEX_TYPE
	       && TREE_CODE (f2->type) != VECTOR_TYPE)
	return -1;
      /* Put the integral type with the bigger precision first.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && INTEGRAL_TYPE_P (f2->type))
	return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
      /* Put any integral type with non-full precision last.  */
      else if (INTEGRAL_TYPE_P (f1->type)
	       && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
		   != TYPE_PRECISION (f1->type)))
	return 1;
      else if (INTEGRAL_TYPE_P (f2->type)
	       && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
		   != TYPE_PRECISION (f2->type)))
	return -1;
      /* Stabilize the sort.  */
      return TYPE_UID (f1->type) - TYPE_UID (f2->type);
    }

  /* We want the bigger accesses first, thus the opposite operator in the next
     line: */
  return f1->size > f2->size ? -1 : 1;
}
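
/* A sketch of the resulting order: accesses at <offset, size> pairs
   <0, 32>, <0, 64>, <32, 32> sort as

     <0, 64>  <0, 32>  <32, 32>

   so that the biggest access at a given offset comes first and is
   immediately followed by the accesses it can contain, which is the shape
   sort_and_splice_var_accesses below relies on.  */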
/* Append a name of the declaration to the name obstack.  A helper function for
   make_fancy_name.  */

static void
make_fancy_decl_name (tree decl)
{
  char buffer[32];

  tree name = DECL_NAME (decl);
  if (name)
    obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
		  IDENTIFIER_LENGTH (name));
  else
    {
      sprintf (buffer, "D%u", DECL_UID (decl));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
    }
}
/* Helper for make_fancy_name.  */

static void
make_fancy_name_1 (tree expr)
{
  char buffer[32];
  tree index;

  if (DECL_P (expr))
    {
      make_fancy_decl_name (expr);
      return;
    }

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      make_fancy_decl_name (TREE_OPERAND (expr, 1));
      break;

    case ARRAY_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      /* Arrays with only one element may not have a constant as their
	 index.  */
      index = TREE_OPERAND (expr, 1);
      if (TREE_CODE (index) != INTEGER_CST)
	break;
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
      break;

    case ADDR_EXPR:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      break;

    case MEM_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      if (!integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  obstack_1grow (&name_obstack, '$');
	  sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
		   TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
	  obstack_grow (&name_obstack, buffer, strlen (buffer));
	}
      break;

    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      gcc_unreachable ();	/* we treat these as scalars.  */
      break;
    default:
      break;
    }
}
/* Create a human readable name for the replacement variable of ACCESS.  */

static char *
make_fancy_name (tree expr)
{
  make_fancy_name_1 (expr);
  obstack_1grow (&name_obstack, '\0');
  return XOBFINISH (&name_obstack, char *);
}
/* Construct a MEM_REF that would reference a part of aggregate BASE of type
   EXP_TYPE at the given OFFSET.  If BASE is something for which
   get_addr_base_and_unit_offset returns NULL, GSI must be non-NULL and is used
   to insert new statements either before or below the current one as specified
   by INSERT_AFTER.  This function is not capable of handling bitfields.  */

static tree
build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
		      tree exp_type, gimple_stmt_iterator *gsi,
		      bool insert_after)
{
  tree prev_base = base;
  tree off;
  HOST_WIDE_INT base_offset;

  gcc_checking_assert (offset % BITS_PER_UNIT == 0);

  base = get_addr_base_and_unit_offset (base, &base_offset);

  /* get_addr_base_and_unit_offset returns NULL for references with a variable
     offset such as array[var_index].  */
  if (!base)
    {
      gimple stmt;
      tree tmp, addr;

      gcc_checking_assert (gsi);
      tmp = create_tmp_reg (build_pointer_type (TREE_TYPE (prev_base)), NULL);
      add_referenced_var (tmp);
      tmp = make_ssa_name (tmp, NULL);
      addr = build_fold_addr_expr (unshare_expr (prev_base));
      STRIP_USELESS_TYPE_CONVERSION (addr);
      stmt = gimple_build_assign (tmp, addr);
      gimple_set_location (stmt, loc);
      SSA_NAME_DEF_STMT (tmp) = stmt;
      if (insert_after)
	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
      update_stmt (stmt);

      off = build_int_cst (reference_alias_ptr_type (prev_base),
			   offset / BITS_PER_UNIT);
      base = tmp;
    }
  else if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
			   base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
			   base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  return fold_build2_loc (loc, MEM_REF, exp_type, base, off);
}
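
/* A usage sketch (hypothetical values): for BASE being a DECL of type
   struct { int a; int b; } and OFFSET 32 (bits), the call

     ref = build_ref_for_offset (loc, base, 32, integer_type_node,
				 NULL, false);

   produces the equivalent of MEM[(int *)&base + 4B].  A NULL GSI is only
   safe when get_addr_base_and_unit_offset can handle BASE; variable-offset
   bases need a GSI so the address computation can be emitted as a separate
   statement.  */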
DEF_VEC_ALLOC_P_STACK (tree);
#define VEC_tree_stack_alloc(alloc) VEC_stack_alloc (tree, alloc)
/* Construct a memory reference to a part of an aggregate BASE at the given
   OFFSET and of the type of MODEL.  In case this is a chain of references
   to a component, the function will replicate the chain of COMPONENT_REFs of
   the expression of MODEL to access it.  GSI and INSERT_AFTER have the same
   meaning as in build_ref_for_offset.  */

static tree
build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
		     struct access *model, gimple_stmt_iterator *gsi,
		     bool insert_after)
{
  tree type = model->type, t;
  VEC(tree,stack) *cr_stack = NULL;

  if (TREE_CODE (model->expr) == COMPONENT_REF)
    {
      tree expr = model->expr;

      /* Create a stack of the COMPONENT_REFs so later we can walk them in
	 order from inner to outer.  */
      cr_stack = VEC_alloc (tree, stack, 6);

      do {
	tree field = TREE_OPERAND (expr, 1);
	tree cr_offset = component_ref_field_offset (expr);
	gcc_assert (cr_offset && host_integerp (cr_offset, 1));

	offset -= TREE_INT_CST_LOW (cr_offset) * BITS_PER_UNIT;
	offset -= TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));

	VEC_safe_push (tree, stack, cr_stack, expr);

	expr = TREE_OPERAND (expr, 0);
	type = TREE_TYPE (expr);
      } while (TREE_CODE (expr) == COMPONENT_REF);
    }

  t = build_ref_for_offset (loc, base, offset, type, gsi, insert_after);

  if (TREE_CODE (model->expr) == COMPONENT_REF)
    {
      unsigned i;
      tree expr;

      /* Now replicate the chain of COMPONENT_REFs from inner to outer.  */
      FOR_EACH_VEC_ELT_REVERSE (tree, cr_stack, i, expr)
	{
	  tree field = TREE_OPERAND (expr, 1);
	  t = fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (field), t, field,
			       TREE_OPERAND (expr, 2));
	}

      VEC_free (tree, stack, cr_stack);
    }

  return t;
}
/* Construct a memory reference consisting of component_refs and array_refs to
   a part of an aggregate *RES (which is of type TYPE).  The requested part
   should have type EXP_TYPE and be at the given OFFSET.  This function might
   not succeed, it returns true when it does and only then *RES points to
   something meaningful.  This function should be used only to build
   expressions that we might need to present to user (e.g. in warnings).  In
   all other situations, build_ref_for_model or build_ref_for_offset should be
   used instead.  */

static bool
build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
				    tree exp_type)
{
  while (1)
    {
      tree fld;
      tree tr_size, index, minidx;
      HOST_WIDE_INT el_size;

      if (offset == 0 && exp_type
	  && types_compatible_p (exp_type, type))
	return true;

      switch (TREE_CODE (type))
	{
	case UNION_TYPE:
	case QUAL_UNION_TYPE:
	case RECORD_TYPE:
	  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	    {
	      HOST_WIDE_INT pos, size;
	      tree expr, *expr_ptr;

	      if (TREE_CODE (fld) != FIELD_DECL)
		continue;

	      pos = int_bit_position (fld);
	      gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
	      tr_size = DECL_SIZE (fld);
	      if (!tr_size || !host_integerp (tr_size, 1))
		continue;
	      size = tree_low_cst (tr_size, 1);
	      if (size == 0)
		{
		  if (pos != offset)
		    continue;
		}
	      else if (pos > offset || (pos + size) <= offset)
		continue;

	      expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
			     NULL_TREE);
	      expr_ptr = &expr;
	      if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
						      offset - pos, exp_type))
		{
		  *res = expr;
		  return true;
		}
	    }
	  return false;

	case ARRAY_TYPE:
	  tr_size = TYPE_SIZE (TREE_TYPE (type));
	  if (!tr_size || !host_integerp (tr_size, 1))
	    return false;
	  el_size = tree_low_cst (tr_size, 1);

	  minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
	    return false;
	  index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
	  if (!integer_zerop (minidx))
	    index = int_const_binop (PLUS_EXPR, index, minidx);
	  *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
			 NULL_TREE, NULL_TREE);
	  offset = offset % el_size;
	  type = TREE_TYPE (type);
	  break;

	default:
	  if (offset != 0)
	    return false;

	  if (exp_type)
	    return false;
	  else
	    return true;
	}
    }
}
/* Return true iff TYPE is a stdarg va_list type.  */

static inline bool
is_va_list_type (tree type)
{
  return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
}
/* Print a message to the dump file explaining why a variable was rejected.  */

static void
reject (tree var, const char *msg)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "\n");
    }
}
/* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
   those variables which have a type suitable for scalarization.  */

static bool
find_var_candidates (void)
{
  tree var, type;
  referenced_var_iterator rvi;
  bool ret = false;
  const char *msg;

  FOR_EACH_REFERENCED_VAR (cfun, var, rvi)
    {
      if (TREE_CODE (var) != VAR_DECL && TREE_CODE (var) != PARM_DECL)
	continue;
      type = TREE_TYPE (var);

      if (!AGGREGATE_TYPE_P (type))
	{
	  reject (var, "not aggregate");
	  continue;
	}
      if (needs_to_live_in_memory (var))
	{
	  reject (var, "needs to live in memory");
	  continue;
	}
      if (TREE_THIS_VOLATILE (var))
	{
	  reject (var, "is volatile");
	  continue;
	}
      if (!COMPLETE_TYPE_P (type))
	{
	  reject (var, "has incomplete type");
	  continue;
	}
      if (!host_integerp (TYPE_SIZE (type), 1))
	{
	  reject (var, "type size not fixed");
	  continue;
	}
      if (tree_low_cst (TYPE_SIZE (type), 1) == 0)
	{
	  reject (var, "type size is zero");
	  continue;
	}
      if (type_internals_preclude_sra_p (type, &msg))
	{
	  reject (var, msg);
	  continue;
	}
      if (/* Fix for PR 41089.  tree-stdarg.c needs to have va_lists intact but
	     we also want to schedule it rather late.  Thus we ignore it in
	     the early pass.  */
	  (sra_mode == SRA_MODE_EARLY_INTRA
	   && is_va_list_type (type)))
	{
	  reject (var, "is va_list");
	  continue;
	}

      bitmap_set_bit (candidate_bitmap, DECL_UID (var));

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
	  print_generic_expr (dump_file, var, 0);
	  fprintf (dump_file, "\n");
	}
      ret = true;
    }

  return ret;
}
/* Sort all accesses for the given variable, check for partial overlaps and
   return NULL if there are any.  If there are none, pick a representative for
   each combination of offset and size and create a linked list out of them.
   Return the pointer to the first representative and make sure it is the first
   one in the vector of accesses.  */

static struct access *
sort_and_splice_var_accesses (tree var)
{
  int i, j, access_count;
  struct access *res, **prev_acc_ptr = &res;
  VEC (access_p, heap) *access_vec;
  bool first = true;
  HOST_WIDE_INT low = -1, high = 0;

  access_vec = get_base_access_vector (var);
  if (!access_vec)
    return NULL;
  access_count = VEC_length (access_p, access_vec);

  /* Sort by <OFFSET, SIZE>.  */
  VEC_qsort (access_p, access_vec, compare_access_positions);

  i = 0;
  while (i < access_count)
    {
      struct access *access = VEC_index (access_p, access_vec, i);
      bool grp_write = access->write;
      bool grp_read = !access->write;
      bool grp_scalar_write = access->write
	&& is_gimple_reg_type (access->type);
      bool grp_scalar_read = !access->write
	&& is_gimple_reg_type (access->type);
      bool grp_assignment_read = access->grp_assignment_read;
      bool grp_assignment_write = access->grp_assignment_write;
      bool multiple_scalar_reads = false;
      bool total_scalarization = access->grp_total_scalarization;
      bool grp_partial_lhs = access->grp_partial_lhs;
      bool first_scalar = is_gimple_reg_type (access->type);
      bool unscalarizable_region = access->grp_unscalarizable_region;

      if (first || access->offset >= high)
	{
	  first = false;
	  low = access->offset;
	  high = access->offset + access->size;
	}
      else if (access->offset > low && access->offset + access->size > high)
	return NULL;
      else
	gcc_assert (access->offset >= low
		    && access->offset + access->size <= high);

      j = i + 1;
      while (j < access_count)
	{
	  struct access *ac2 = VEC_index (access_p, access_vec, j);
	  if (ac2->offset != access->offset || ac2->size != access->size)
	    break;
	  if (ac2->write)
	    {
	      grp_write = true;
	      grp_scalar_write = (grp_scalar_write
				  || is_gimple_reg_type (ac2->type));
	    }
	  else
	    {
	      grp_read = true;
	      if (is_gimple_reg_type (ac2->type))
		{
		  if (grp_scalar_read)
		    multiple_scalar_reads = true;
		  else
		    grp_scalar_read = true;
		}
	    }
	  grp_assignment_read |= ac2->grp_assignment_read;
	  grp_assignment_write |= ac2->grp_assignment_write;
	  grp_partial_lhs |= ac2->grp_partial_lhs;
	  unscalarizable_region |= ac2->grp_unscalarizable_region;
	  total_scalarization |= ac2->grp_total_scalarization;
	  relink_to_new_repr (access, ac2);

	  /* If there are both aggregate-type and scalar-type accesses with
	     this combination of size and offset, the comparison function
	     should have put the scalars first.  */
	  gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
	  ac2->group_representative = access;
	  j++;
	}

      i = j;

      access->group_representative = access;
      access->grp_write = grp_write;
      access->grp_read = grp_read;
      access->grp_scalar_read = grp_scalar_read;
      access->grp_scalar_write = grp_scalar_write;
      access->grp_assignment_read = grp_assignment_read;
      access->grp_assignment_write = grp_assignment_write;
      access->grp_hint = multiple_scalar_reads || total_scalarization;
      access->grp_total_scalarization = total_scalarization;
      access->grp_partial_lhs = grp_partial_lhs;
      access->grp_unscalarizable_region = unscalarizable_region;
      if (access->first_link)
	add_access_to_work_queue (access);

      *prev_acc_ptr = access;
      prev_acc_ptr = &access->next_grp;
    }

  gcc_assert (res == VEC_index (access_p, access_vec, 0));
  return res;
}
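
/* E.g. (a sketch): if the sorted vector for a variable v contains accesses
   <0, 32> write, <0, 32> read and <32, 32> read, the two accesses at offset
   0 are spliced into a single representative with both grp_read and
   grp_write set, and the function returns the list

     <0, 32> -> <32, 32>

   linked through next_grp.  */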
/* Create a variable for the given ACCESS which determines the type, name and a
   few other properties.  Return the variable declaration and store it also to
   ACCESS->replacement.  */

static tree
create_access_replacement (struct access *access, bool rename)
{
  tree repl;

  repl = create_tmp_var (access->type, "SR");
  add_referenced_var (repl);
  if (rename)
    mark_sym_for_renaming (repl);

  if (!access->grp_partial_lhs
      && (TREE_CODE (access->type) == COMPLEX_TYPE
	  || TREE_CODE (access->type) == VECTOR_TYPE))
    DECL_GIMPLE_REG_P (repl) = 1;

  DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
  DECL_ARTIFICIAL (repl) = 1;
  DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);

  if (DECL_NAME (access->base)
      && !DECL_IGNORED_P (access->base)
      && !DECL_ARTIFICIAL (access->base))
    {
      char *pretty_name = make_fancy_name (access->expr);
      tree debug_expr = unshare_expr (access->expr), d;

      DECL_NAME (repl) = get_identifier (pretty_name);
      obstack_free (&name_obstack, pretty_name);

      /* Get rid of any SSA_NAMEs embedded in debug_expr,
	 as DECL_DEBUG_EXPR isn't considered when looking for still
	 used SSA_NAMEs and thus they could be freed.  All debug info
	 generation cares about is whether something is constant or
	 variable and that get_ref_base_and_extent works properly on the
	 expression.  */
      for (d = debug_expr; handled_component_p (d); d = TREE_OPERAND (d, 0))
	switch (TREE_CODE (d))
	  {
	  case ARRAY_REF:
	  case ARRAY_RANGE_REF:
	    if (TREE_OPERAND (d, 1)
		&& TREE_CODE (TREE_OPERAND (d, 1)) == SSA_NAME)
	      TREE_OPERAND (d, 1) = SSA_NAME_VAR (TREE_OPERAND (d, 1));
	    if (TREE_OPERAND (d, 3)
		&& TREE_CODE (TREE_OPERAND (d, 3)) == SSA_NAME)
	      TREE_OPERAND (d, 3) = SSA_NAME_VAR (TREE_OPERAND (d, 3));
	    break;
	  case COMPONENT_REF:
	    if (TREE_OPERAND (d, 2)
		&& TREE_CODE (TREE_OPERAND (d, 2)) == SSA_NAME)
	      TREE_OPERAND (d, 2) = SSA_NAME_VAR (TREE_OPERAND (d, 2));
	    break;
	  default:
	    break;
	  }
      SET_DECL_DEBUG_EXPR (repl, debug_expr);
      DECL_DEBUG_EXPR_IS_FROM (repl) = 1;
      if (access->grp_no_warning)
	TREE_NO_WARNING (repl) = 1;
      else
	TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
    }
  else
    TREE_NO_WARNING (repl) = 1;

  if (dump_file)
    {
      fprintf (dump_file, "Created a replacement for ");
      print_generic_expr (dump_file, access->base, 0);
      fprintf (dump_file, " offset: %u, size: %u: ",
	       (unsigned) access->offset, (unsigned) access->size);
      print_generic_expr (dump_file, repl, 0);
      fprintf (dump_file, "\n");
    }
  sra_stats.replacements++;

  return repl;
}
/* Return ACCESS scalar replacement, create it if it does not exist yet.  */

static inline tree
get_access_replacement (struct access *access)
{
  gcc_assert (access->grp_to_be_replaced);

  if (!access->replacement_decl)
    access->replacement_decl = create_access_replacement (access, true);
  return access->replacement_decl;
}

/* Return ACCESS scalar replacement, create it if it does not exist yet but do
   not mark it for renaming.  */

static inline tree
get_unrenamed_access_replacement (struct access *access)
{
  gcc_assert (!access->grp_to_be_replaced);

  if (!access->replacement_decl)
    access->replacement_decl = create_access_replacement (access, false);
  return access->replacement_decl;
}
/* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
   linked list along the way.  Stop when *ACCESS is NULL or the access pointed
   to by it is not "within" the root.  Return false iff some accesses partially
   overlap.  */

static bool
build_access_subtree (struct access **access)
{
  struct access *root = *access, *last_child = NULL;
  HOST_WIDE_INT limit = root->offset + root->size;

  *access = (*access)->next_grp;
  while (*access && (*access)->offset + (*access)->size <= limit)
    {
      if (!last_child)
	root->first_child = *access;
      else
	last_child->next_sibling = *access;
      last_child = *access;

      if (!build_access_subtree (access))
	return false;
    }

  if (*access && (*access)->offset < limit)
    return false;

  return true;
}
/* Build a tree of access representatives, ACCESS is the pointer to the first
   one, others are linked in a list by the next_grp field.  Return false iff
   some accesses partially overlap.  */

static bool
build_access_trees (struct access *access)
{
  while (access)
    {
      struct access *root = access;

      if (!build_access_subtree (&access))
	return false;
      root->next_grp = access;
    }
  return true;
}
/* Return true if expr contains some ARRAY_REFs into a variable bounded
   array.  */

static bool
expr_with_var_bounded_array_refs_p (tree expr)
{
  while (handled_component_p (expr))
    {
      if (TREE_CODE (expr) == ARRAY_REF
	  && !host_integerp (array_ref_low_bound (expr), 0))
	return true;
      expr = TREE_OPERAND (expr, 0);
    }
  return false;
}
/* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
   doing so seems beneficial and ALLOW_REPLACEMENTS allows it.  Also set all
   sorts of access flags appropriately along the way, notably propagate
   grp_read, grp_write, their assignment counterparts and
   grp_total_scalarization from PARENT down to ROOT.

   Creating a replacement for a scalar access is considered beneficial if its
   grp_hint is set (this means we are either attempting total scalarization or
   there is more than one direct read access) or according to the following
   table:

   Access written to through a scalar type (once or more times)
   |
   |	Written to in an assignment statement
   |	|
   |	|	Access read as scalar _once_
   |	|	|
   |	|	|	Read in an assignment statement
   |	|	|	|
   |	|	|	|	Scalarize	Comment
-----------------------------------------------------------------------------
   0	0	0	0			No access for the scalar
   0	0	0	1			No access for the scalar
   0	0	1	0	No		Single read - won't help
   0	0	1	1	No		The same case
   0	1	0	0			No access for the scalar
   0	1	0	1			No access for the scalar
   0	1	1	0	Yes		s = *g; return s.i;
   0	1	1	1	Yes		The same case as above
   1	0	0	0	No		Won't help
   1	0	0	1	Yes		s.i = 1; *g = s;
   1	0	1	0	Yes		s.i = 5; g = s.i;
   1	0	1	1	Yes		The same case as above
   1	1	0	0	No		Won't help.
   1	1	0	1	Yes		s.i = 1; *g = s;
   1	1	1	0	Yes		s = *g; return s.i;
   1	1	1	1	Yes		Any of the above yeses  */
static bool
analyze_access_subtree (struct access *root, struct access *parent,
			bool allow_replacements)
{
  struct access *child;
  HOST_WIDE_INT limit = root->offset + root->size;
  HOST_WIDE_INT covered_to = root->offset;
  bool scalar = is_gimple_reg_type (root->type);
  bool hole = false, sth_created = false;

  if (parent)
    {
      if (parent->grp_read)
	root->grp_read = 1;
      if (parent->grp_assignment_read)
	root->grp_assignment_read = 1;
      if (parent->grp_write)
	root->grp_write = 1;
      if (parent->grp_assignment_write)
	root->grp_assignment_write = 1;
      if (parent->grp_total_scalarization)
	root->grp_total_scalarization = 1;
    }

  if (root->grp_unscalarizable_region)
    allow_replacements = false;

  if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
    allow_replacements = false;

  for (child = root->first_child; child; child = child->next_sibling)
    {
      hole |= covered_to < child->offset;
      sth_created |= analyze_access_subtree (child, root,
					     allow_replacements && !scalar);

      root->grp_unscalarized_data |= child->grp_unscalarized_data;
      root->grp_total_scalarization &= child->grp_total_scalarization;
      if (child->grp_covered)
	covered_to += child->size;
      else
	hole = true;
    }

  if (allow_replacements && scalar && !root->first_child
      && (root->grp_hint
	  || ((root->grp_scalar_read || root->grp_assignment_read)
	      && (root->grp_scalar_write || root->grp_assignment_write))))
    {
      bool new_integer_type;
      if (TREE_CODE (root->type) == ENUMERAL_TYPE)
	{
	  tree rt = root->type;
	  root->type = build_nonstandard_integer_type (TYPE_PRECISION (rt),
						       TYPE_UNSIGNED (rt));
	  new_integer_type = true;
	}
      else
	new_integer_type = false;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Marking ");
	  print_generic_expr (dump_file, root->base, 0);
	  fprintf (dump_file, " offset: %u, size: %u ",
		   (unsigned) root->offset, (unsigned) root->size);
	  fprintf (dump_file, " to be replaced%s.\n",
		   new_integer_type ? " with an integer": "");
	}

      root->grp_to_be_replaced = 1;
      sth_created = true;
      hole = false;
    }
  else
    {
      if (covered_to < limit)
	hole = true;
      if (scalar)
	root->grp_total_scalarization = 0;
    }

  if (sth_created
      && (!hole || root->grp_total_scalarization))
    {
      root->grp_covered = 1;
      return true;
    }
  if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
    root->grp_unscalarized_data = 1; /* not covered and written to */
  if (sth_created)
    return true;
  return false;
}
2165 /* Analyze all access trees linked by next_grp by means of
2166 analyze_access_subtree. */
2168 analyze_access_trees (struct access *access)
2174 if (analyze_access_subtree (access, NULL, true))
2176 access = access->next_grp;
2182 /* Return true iff a potential new child of LACC at offset OFFSET and with size
2183 SIZE would conflict with an already existing one. If exactly such a child
2184 already exists in LACC, store a pointer to it in EXACT_MATCH. */
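/* For example (a sketch): if LACC already has a child at offset 0 with size
   32, then a proposed child at offset 16 with size 32 conflicts; a proposed
   child at offset 0 with size 32 is an exact duplicate, which also counts as
   a conflict but is additionally stored in EXACT_MATCH; and a proposed child
   at offset 32 does not conflict at all.  */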
2187 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2188 HOST_WIDE_INT size, struct access **exact_match)
2190 struct access *child;
2192 for (child = lacc->first_child; child; child = child->next_sibling)
2194 if (child->offset == norm_offset && child->size == size)
2196 *exact_match = child;
2200 if (child->offset < norm_offset + size
2201 && child->offset + child->size > norm_offset)
2208 /* Create a new child access of PARENT, with all properties just like MODEL
2209 except for its offset and with its grp_write false and grp_read true.
2210 Return the new access or NULL if it cannot be created. Note that this access
2211 is created long after all splicing and sorting; it is not located in any
2212 access vector and is automatically a representative of its group. */
2214 static struct access *
2215 create_artificial_child_access (struct access *parent, struct access *model,
2216 HOST_WIDE_INT new_offset)
2218 struct access *access;
2219 struct access **child;
2220 tree expr = parent->base;
2222 gcc_assert (!model->grp_unscalarizable_region);
2224 access = (struct access *) pool_alloc (access_pool);
2225 memset (access, 0, sizeof (struct access));
2226 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2229 access->grp_no_warning = true;
2230 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2231 new_offset, model, NULL, false);
2234 access->base = parent->base;
2235 access->expr = expr;
2236 access->offset = new_offset;
2237 access->size = model->size;
2238 access->type = model->type;
2239 access->grp_write = true;
2240 access->grp_read = false;
2242 child = &parent->first_child;
2243 while (*child && (*child)->offset < new_offset)
2244 child = &(*child)->next_sibling;
2246 access->next_sibling = *child;
2253 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
2254 true if any new subaccess was created. Additionally, if RACC is a scalar
2255 access but LACC is not, change the type of the latter, if possible. */
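/* For illustration (a sketch; a$i and b$i stand for hypothetical scalar
   replacements): if the function contains the aggregate assignment

     a = b;

   and b has a scalar subaccess b.i but a has no access at the corresponding
   offset, an artificial child access of a is created there (unless it would
   partially overlap an existing access of a), so that the copy can later be
   carried out as a$i = b$i rather than through memory.  */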
2258 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2260 struct access *rchild;
2261 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2264 if (is_gimple_reg_type (lacc->type)
2265 || lacc->grp_unscalarizable_region
2266 || racc->grp_unscalarizable_region)
2269 if (is_gimple_reg_type (racc->type))
2271 if (!lacc->first_child && !racc->first_child)
2273 tree t = lacc->base;
2275 lacc->type = racc->type;
2276 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2277 lacc->offset, racc->type))
2281 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2282 lacc->base, lacc->offset,
2284 lacc->grp_no_warning = true;
2290 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2292 struct access *new_acc = NULL;
2293 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2295 if (rchild->grp_unscalarizable_region)
2298 if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2303 rchild->grp_hint = 1;
2304 new_acc->grp_hint |= new_acc->grp_read;
2305 if (rchild->first_child)
2306 ret |= propagate_subaccesses_across_link (new_acc, rchild);
2311 rchild->grp_hint = 1;
2312 new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2316 if (racc->first_child)
2317 propagate_subaccesses_across_link (new_acc, rchild);
2324 /* Propagate all subaccesses across assignment links. */
2327 propagate_all_subaccesses (void)
2329 while (work_queue_head)
2331 struct access *racc = pop_access_from_work_queue ();
2332 struct assign_link *link;
2334 gcc_assert (racc->first_link);
2336 for (link = racc->first_link; link; link = link->next)
2338 struct access *lacc = link->lacc;
2340 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2342 lacc = lacc->group_representative;
2343 if (propagate_subaccesses_across_link (lacc, racc)
2344 && lacc->first_link)
2345 add_access_to_work_queue (lacc);
2350 /* Go through all accesses collected throughout the (intraprocedural) analysis
2351 stage, exclude overlapping ones, identify representatives and build trees
2352 out of them, making decisions about scalarization on the way. Return true
2353 iff there are any to-be-scalarized variables after this stage. */
2356 analyze_all_variable_accesses (void)
2359 bitmap tmp = BITMAP_ALLOC (NULL);
2361 unsigned i, max_total_scalarization_size;
2363 max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
2364 * MOVE_RATIO (optimize_function_for_speed_p (cfun));
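/* For example, on a hypothetical 64-bit target (UNITS_PER_WORD == 8,
   BITS_PER_UNIT == 8) where MOVE_RATIO evaluates to 8, this limit would be
   8 * 8 * 8 == 512 bits, i.e. aggregates of up to 64 bytes may be totally
   scalarized.  */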
2366 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2367 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2368 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2370 tree var = referenced_var (i);
2372 if (TREE_CODE (var) == VAR_DECL
2373 && type_consists_of_records_p (TREE_TYPE (var)))
2375 if ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
2376 <= max_total_scalarization_size)
2378 completely_scalarize_var (var);
2379 if (dump_file && (dump_flags & TDF_DETAILS))
2381 fprintf (dump_file, "Will attempt to totally scalarize ");
2382 print_generic_expr (dump_file, var, 0);
2383 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2386 else if (dump_file && (dump_flags & TDF_DETAILS))
2388 fprintf (dump_file, "Too big to totally scalarize: ");
2389 print_generic_expr (dump_file, var, 0);
2390 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
2395 bitmap_copy (tmp, candidate_bitmap);
2396 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2398 tree var = referenced_var (i);
2399 struct access *access;
2401 access = sort_and_splice_var_accesses (var);
2402 if (!access || !build_access_trees (access))
2403 disqualify_candidate (var,
2404 "No or inhibitingly overlapping accesses.");
2407 propagate_all_subaccesses ();
2409 bitmap_copy (tmp, candidate_bitmap);
2410 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2412 tree var = referenced_var (i);
2413 struct access *access = get_first_repr_for_decl (var);
2415 if (analyze_access_trees (access))
2418 if (dump_file && (dump_flags & TDF_DETAILS))
2420 fprintf (dump_file, "\nAccess trees for ");
2421 print_generic_expr (dump_file, var, 0);
2422 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2423 dump_access_tree (dump_file, access);
2424 fprintf (dump_file, "\n");
2428 disqualify_candidate (var, "No scalar replacements to be created.");
2435 statistics_counter_event (cfun, "Scalarized aggregates", res);
2442 /* Generate statements copying scalar replacements of accesses within a subtree
2443 into or out of AGG. ACCESS, all its children, siblings and their children
2444 are to be processed. AGG is an aggregate-type expression (it can be a
2445 declaration but does not have to be; it can for example also be a MEM_REF or
2446 a series of handled components). TOP_OFFSET is the offset of the processed
2447 subtree, which has to be subtracted from the offsets of individual accesses
2448 to get the corresponding offsets within AGG. If CHUNK_SIZE is non-zero, copy
2449 only replacements in the interval <START_OFFSET, START_OFFSET + CHUNK_SIZE>,
2450 otherwise copy all of them. GSI is a statement iterator used to place the
2451 new statements. WRITE should be true when the statements should write from
2452 AGG to the replacements and false if vice versa. If INSERT_AFTER is true,
2453 the new statements will be added after the current statement in GSI;
2454 otherwise they will be added before it. */
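/* A sketch of typical output (s$i and s$f are hypothetical replacement
   names): for an access tree describing s.i and s.f, a call with WRITE true
   and INSERT_AFTER false emits

     s$i = s.i;
     s$f = s.f;

   before the statement at GSI, while WRITE false generates the assignments
   in the opposite direction, refreshing the aggregate from the
   replacements.  */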
2457 generate_subtree_copies (struct access *access, tree agg,
2458 HOST_WIDE_INT top_offset,
2459 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2460 gimple_stmt_iterator *gsi, bool write,
2461 bool insert_after, location_t loc)
2465 if (chunk_size && access->offset >= start_offset + chunk_size)
2468 if (access->grp_to_be_replaced
2470 || access->offset + access->size > start_offset))
2472 tree expr, repl = get_access_replacement (access);
2475 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2476 access, gsi, insert_after);
2480 if (access->grp_partial_lhs)
2481 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2483 insert_after ? GSI_NEW_STMT
2485 stmt = gimple_build_assign (repl, expr);
2489 TREE_NO_WARNING (repl) = 1;
2490 if (access->grp_partial_lhs)
2491 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2493 insert_after ? GSI_NEW_STMT
2495 stmt = gimple_build_assign (expr, repl);
2497 gimple_set_location (stmt, loc);
2500 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2502 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2504 sra_stats.subtree_copies++;
2507 if (access->first_child)
2508 generate_subtree_copies (access->first_child, agg, top_offset,
2509 start_offset, chunk_size, gsi,
2510 write, insert_after, loc);
2512 access = access->next_sibling;
2517 /* Assign zero to all scalar replacements in an access subtree. ACCESS is
2518 the root of the subtree to be processed. GSI is the statement iterator used
2519 for inserting statements which are added after the current statement if
2520 INSERT_AFTER is true or before it otherwise. */
2523 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2524 bool insert_after, location_t loc)
2527 struct access *child;
2529 if (access->grp_to_be_replaced)
2533 stmt = gimple_build_assign (get_access_replacement (access),
2534 build_zero_cst (access->type));
2536 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2538 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2540 gimple_set_location (stmt, loc);
2543 for (child = access->first_child; child; child = child->next_sibling)
2544 init_subtree_with_zero (child, gsi, insert_after, loc);
2547 /* Search for an access representative for the given expression EXPR and
2548 return it or NULL if it cannot be found. */
2550 static struct access *
2551 get_access_for_expr (tree expr)
2553 HOST_WIDE_INT offset, size, max_size;
2556 /* FIXME: This should not be necessary, but Ada produces V_C_Es with a type of
2557 a different size than the size of its argument, and we need the latter one. */
2559 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2560 expr = TREE_OPERAND (expr, 0);
2562 base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
2563 if (max_size == -1 || !DECL_P (base))
2566 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2569 return get_var_base_offset_size_access (base, offset, max_size);
2572 /* Replace the expression EXPR with a scalar replacement if there is one and
2573 generate other statements to do type conversion or subtree copying if
2574 necessary. GSI is used to place newly created statements; WRITE is true if
2575 the expression is being written to (it is on the LHS of a statement or an
2576 output in an assembly statement). */
2579 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
2582 struct access *access;
2585 if (TREE_CODE (*expr) == BIT_FIELD_REF)
2588 expr = &TREE_OPERAND (*expr, 0);
2593 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2594 expr = &TREE_OPERAND (*expr, 0);
2595 access = get_access_for_expr (*expr);
2598 type = TREE_TYPE (*expr);
2600 loc = gimple_location (gsi_stmt (*gsi));
2601 if (access->grp_to_be_replaced)
2603 tree repl = get_access_replacement (access);
2604 /* If we replace a non-register typed access simply use the original
2605 access expression to extract the scalar component afterwards.
2606 This happens if scalarizing a function return value or parameter
2607 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2608 gcc.c-torture/compile/20011217-1.c.
2610 We also want to use this when accessing a complex or vector which can
2611 be accessed as a different type too, potentially creating a need for
2612 type conversion (see PR42196) and when scalarized unions are involved
2613 in assembler statements (see PR42398). */
2614 if (!useless_type_conversion_p (type, access->type))
2618 ref = build_ref_for_model (loc, access->base, access->offset, access,
2625 if (access->grp_partial_lhs)
2626 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2627 false, GSI_NEW_STMT);
2628 stmt = gimple_build_assign (repl, ref);
2629 gimple_set_location (stmt, loc);
2630 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2636 if (access->grp_partial_lhs)
2637 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2638 true, GSI_SAME_STMT);
2639 stmt = gimple_build_assign (ref, repl);
2640 gimple_set_location (stmt, loc);
2641 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2649 if (access->first_child)
2651 HOST_WIDE_INT start_offset, chunk_size;
2653 && host_integerp (TREE_OPERAND (bfr, 1), 1)
2654 && host_integerp (TREE_OPERAND (bfr, 2), 1))
2656 chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
2657 start_offset = access->offset
2658 + tree_low_cst (TREE_OPERAND (bfr, 2), 1);
2661 start_offset = chunk_size = 0;
2663 generate_subtree_copies (access->first_child, access->base, 0,
2664 start_offset, chunk_size, gsi, write, write,
2670 /* Where scalar replacements of the RHS have been written to when a replacement
2671 of the LHS of an assignment cannot be directly loaded from a replacement of the RHS. */
2673 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
2674 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2675 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
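/* In other words (a sketch; r$i stands for a hypothetical replacement): for
   an assignment lhs = rhs, SRA_UDH_RIGHT means statements like rhs.i = r$i
   were emitted so that the preserved aggregate copy reads up-to-date data,
   whereas SRA_UDH_LEFT means the replacements were stored directly into the
   LHS as lhs.i = r$i.  */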
2677 /* Store all replacements in the access tree rooted in TOP_RACC either to their
2678 base aggregate if there are unscalarized data or directly to LHS of the
2679 statement that is pointed to by GSI otherwise. */
2681 static enum unscalarized_data_handling
2682 handle_unscalarized_data_in_subtree (struct access *top_racc,
2683 gimple_stmt_iterator *gsi)
2685 if (top_racc->grp_unscalarized_data)
2687 generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
2689 gimple_location (gsi_stmt (*gsi)));
2690 return SRA_UDH_RIGHT;
2694 tree lhs = gimple_assign_lhs (gsi_stmt (*gsi));
2695 generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
2696 0, 0, gsi, false, false,
2697 gimple_location (gsi_stmt (*gsi)));
2698 return SRA_UDH_LEFT;
2703 /* Try to generate statements to load all sub-replacements in an access subtree
2704 formed by children of LACC from scalar replacements in the TOP_RACC subtree.
2705 If that is not possible, refresh the TOP_RACC base aggregate and load the
2706 accesses from it. LEFT_OFFSET is the offset of the left whole subtree being
2707 copied. NEW_GSI is a stmt iterator used for statement insertions after the
2708 original assignment; OLD_GSI is used to insert statements before the
2709 assignment. *REFRESHED keeps track of whether we have needed to refresh
2710 replacements of the LHS and from which side of the assignment this takes place. */
2714 load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
2715 HOST_WIDE_INT left_offset,
2716 gimple_stmt_iterator *old_gsi,
2717 gimple_stmt_iterator *new_gsi,
2718 enum unscalarized_data_handling *refreshed)
2720 location_t loc = gimple_location (gsi_stmt (*old_gsi));
2721 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
2723 if (lacc->grp_to_be_replaced)
2725 struct access *racc;
2726 HOST_WIDE_INT offset = lacc->offset - left_offset + top_racc->offset;
2730 racc = find_access_in_subtree (top_racc, offset, lacc->size);
2731 if (racc && racc->grp_to_be_replaced)
2733 rhs = get_access_replacement (racc);
2734 if (!useless_type_conversion_p (lacc->type, racc->type))
2735 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, lacc->type, rhs);
2737 if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
2738 rhs = force_gimple_operand_gsi (old_gsi, rhs, true, NULL_TREE,
2739 true, GSI_SAME_STMT);
2743 /* No suitable access on the right hand side, need to load from
2744 the aggregate. See if we have to update it first... */
2745 if (*refreshed == SRA_UDH_NONE)
2746 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2749 if (*refreshed == SRA_UDH_LEFT)
2750 rhs = build_ref_for_model (loc, lacc->base, lacc->offset, lacc,
2753 rhs = build_ref_for_model (loc, top_racc->base, offset, lacc,
2755 if (lacc->grp_partial_lhs)
2756 rhs = force_gimple_operand_gsi (new_gsi, rhs, true, NULL_TREE,
2757 false, GSI_NEW_STMT);
2760 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
2761 gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
2762 gimple_set_location (stmt, loc);
2764 sra_stats.subreplacements++;
2766 else if (*refreshed == SRA_UDH_NONE
2767 && lacc->grp_read && !lacc->grp_covered)
2768 *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2771 if (lacc->first_child)
2772 load_assign_lhs_subreplacements (lacc, top_racc, left_offset,
2773 old_gsi, new_gsi, refreshed);
2777 /* Result code for SRA assignment modification. */
2778 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
2779 SRA_AM_MODIFIED, /* stmt changed but not removed */
2781 SRA_AM_REMOVED }; /* stmt eliminated */
2783 /* Modify assignments with a CONSTRUCTOR on their RHS. STMT contains a pointer
2784 to the assignment and GSI is the statement iterator pointing at it. Returns
2785 the same values as sra_modify_assign. */
2787 static enum assignment_mod_result
2788 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2790 tree lhs = gimple_assign_lhs (*stmt);
2794 acc = get_access_for_expr (lhs);
2798 loc = gimple_location (*stmt);
2799 if (VEC_length (constructor_elt,
2800 CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
2802 /* I have never seen this code path trigger but if it can happen the
2803 following should handle it gracefully. */
2804 if (access_has_children_p (acc))
2805 generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
2807 return SRA_AM_MODIFIED;
2810 if (acc->grp_covered)
2812 init_subtree_with_zero (acc, gsi, false, loc);
2813 unlink_stmt_vdef (*stmt);
2814 gsi_remove (gsi, true);
2815 return SRA_AM_REMOVED;
2819 init_subtree_with_zero (acc, gsi, true, loc);
2820 return SRA_AM_MODIFIED;
2824 /* Create and return a new suitable default definition SSA_NAME for RACC which
2825 is an access describing an uninitialized part of an aggregate that is being loaded. */
2829 get_repl_default_def_ssa_name (struct access *racc)
2833 decl = get_unrenamed_access_replacement (racc);
2835 repl = gimple_default_def (cfun, decl);
2838 repl = make_ssa_name (decl, gimple_build_nop ());
2839 set_default_def (decl, repl);
2845 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration somewhere in it. */
2849 contains_bitfld_comp_ref_p (const_tree ref)
2851 while (handled_component_p (ref))
2853 if (TREE_CODE (ref) == COMPONENT_REF
2854 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
2856 ref = TREE_OPERAND (ref, 0);
2862 /* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
2863 bit-field field declaration somewhere in it. */
2866 contains_vce_or_bfcref_p (const_tree ref)
2868 while (handled_component_p (ref))
2870 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
2871 || (TREE_CODE (ref) == COMPONENT_REF
2872 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
2874 ref = TREE_OPERAND (ref, 0);
2880 /* Examine both sides of the assignment statement pointed to by STMT, replace
2881 them with a scalar replacement if there is one and generate copying of
2882 replacements if scalarized aggregates have been used in the assignment. GSI
2883 is used to hold generated statements for type conversions and subtree copying. */
2886 static enum assignment_mod_result
2887 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2889 struct access *lacc, *racc;
2891 bool modify_this_stmt = false;
2892 bool force_gimple_rhs = false;
2894 gimple_stmt_iterator orig_gsi = *gsi;
2896 if (!gimple_assign_single_p (*stmt))
2898 lhs = gimple_assign_lhs (*stmt);
2899 rhs = gimple_assign_rhs1 (*stmt);
2901 if (TREE_CODE (rhs) == CONSTRUCTOR)
2902 return sra_modify_constructor_assign (stmt, gsi);
2904 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
2905 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
2906 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
2908 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
2910 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
2912 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
2915 lacc = get_access_for_expr (lhs);
2916 racc = get_access_for_expr (rhs);
2920 loc = gimple_location (*stmt);
2921 if (lacc && lacc->grp_to_be_replaced)
2923 lhs = get_access_replacement (lacc);
2924 gimple_assign_set_lhs (*stmt, lhs);
2925 modify_this_stmt = true;
2926 if (lacc->grp_partial_lhs)
2927 force_gimple_rhs = true;
2931 if (racc && racc->grp_to_be_replaced)
2933 rhs = get_access_replacement (racc);
2934 modify_this_stmt = true;
2935 if (racc->grp_partial_lhs)
2936 force_gimple_rhs = true;
2940 if (modify_this_stmt)
2942 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2944 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
2945 ??? This should move to fold_stmt which we simply should
2946 call after building a VIEW_CONVERT_EXPR here. */
2947 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
2948 && !contains_bitfld_comp_ref_p (lhs)
2949 && !access_has_children_p (lacc))
2951 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
2952 gimple_assign_set_lhs (*stmt, lhs);
2954 else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
2955 && !contains_vce_or_bfcref_p (rhs)
2956 && !access_has_children_p (racc))
2957 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
2959 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2961 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
2963 if (is_gimple_reg_type (TREE_TYPE (lhs))
2964 && TREE_CODE (lhs) != SSA_NAME)
2965 force_gimple_rhs = true;
2970 /* From this point on, the function deals with assignments in between
2971 aggregates when at least one has scalar reductions of some of its
2972 components. There are three possible scenarios: 1) both the LHS and the RHS
2973 have to-be-scalarized components, 2) only the RHS has, or 3) only the LHS has.
2975 In the first case, we would like to load the LHS components from RHS
2976 components whenever possible. If that is not possible, we would like to
2977 read it directly from the RHS (after updating it by storing in it its own
2978 components). If there are some necessary unscalarized data in the LHS,
2979 those will be loaded by the original assignment too. If neither of these
2980 cases happen, the original statement can be removed. Most of this is done
2981 by load_assign_lhs_subreplacements.
2983 In the second case, we would like to store all RHS scalarized components
2984 directly into the LHS and, if they cover the aggregate completely, remove the
2985 statement too. In the third case, we want the LHS components to be loaded
2986 directly from the RHS (DSE will remove the original statement if it becomes redundant).
2989 This is a bit complex but manageable when types match and when unions do
2990 not cause confusion in a way that we cannot really load a component of LHS
2991 from the RHS or vice versa (the access representing this level can have
2992 subaccesses that are accessible only through a different union field at a
2993 higher level - different from the one used in the examined expression).
2996 Therefore, I specially handle a fourth case, happening when there is a
2997 specific type cast or it is impossible to locate a scalarized subaccess on
2998 the other side of the expression. If that happens, I simply "refresh" the
2999 RHS by storing its scalarized components back into it, leave the original statement
3000 there to do the copying and then load the scalar replacements of the LHS.
3001 This is what the first branch does. */
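/* A sketch of what the first branch below produces (x$i and y$i are
   hypothetical replacement names): for a statement such as

     x = VIEW_CONVERT_EXPR<struct S>(y);

   the original statement is kept to do the copying; statements like
   y.i = y$i are inserted before it so that y is up to date, and statements
   like x$i = x.i are inserted after it to reload the replacements of the
   LHS.  */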
3003 if (modify_this_stmt
3004 || gimple_has_volatile_ops (*stmt)
3005 || contains_vce_or_bfcref_p (rhs)
3006 || contains_vce_or_bfcref_p (lhs))
3008 if (access_has_children_p (racc))
3009 generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
3010 gsi, false, false, loc);
3011 if (access_has_children_p (lacc))
3012 generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
3013 gsi, true, true, loc);
3014 sra_stats.separate_lhs_rhs_handling++;
3018 if (access_has_children_p (lacc) && access_has_children_p (racc))
3020 gimple_stmt_iterator orig_gsi = *gsi;
3021 enum unscalarized_data_handling refreshed;
3023 if (lacc->grp_read && !lacc->grp_covered)
3024 refreshed = handle_unscalarized_data_in_subtree (racc, gsi);
3026 refreshed = SRA_UDH_NONE;
3028 load_assign_lhs_subreplacements (lacc, racc, lacc->offset,
3029 &orig_gsi, gsi, &refreshed);
3030 if (refreshed != SRA_UDH_RIGHT)
3033 unlink_stmt_vdef (*stmt);
3034 gsi_remove (&orig_gsi, true);
3035 sra_stats.deleted++;
3036 return SRA_AM_REMOVED;
3043 if (!racc->grp_to_be_replaced && !racc->grp_unscalarized_data)
3047 fprintf (dump_file, "Removing load: ");
3048 print_gimple_stmt (dump_file, *stmt, 0, 0);
3051 if (TREE_CODE (lhs) == SSA_NAME)
3053 rhs = get_repl_default_def_ssa_name (racc);
3054 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3056 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
3057 TREE_TYPE (lhs), rhs);
3061 if (racc->first_child)
3062 generate_subtree_copies (racc->first_child, lhs,
3063 racc->offset, 0, 0, gsi,
3066 gcc_assert (*stmt == gsi_stmt (*gsi));
3067 unlink_stmt_vdef (*stmt);
3068 gsi_remove (gsi, true);
3069 sra_stats.deleted++;
3070 return SRA_AM_REMOVED;
3073 else if (racc->first_child)
3074 generate_subtree_copies (racc->first_child, lhs, racc->offset,
3075 0, 0, gsi, false, true, loc);
3077 if (access_has_children_p (lacc))
3078 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3079 0, 0, gsi, true, true, loc);
3083 /* This gimplification must be done after generate_subtree_copies, lest we
3084 insert the subtree copies in the middle of the gimplified sequence. */
3085 if (force_gimple_rhs)
3086 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3087 true, GSI_SAME_STMT);
3088 if (gimple_assign_rhs1 (*stmt) != rhs)
3090 modify_this_stmt = true;
3091 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3092 gcc_assert (*stmt == gsi_stmt (orig_gsi));
3095 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3098 /* Traverse the function body and carry out all modifications as decided in
3099 analyze_all_variable_accesses. Return true iff the CFG has been changed. */
3103 sra_modify_function_body (void)
3105 bool cfg_changed = false;
3110 gimple_stmt_iterator gsi = gsi_start_bb (bb);
3111 while (!gsi_end_p (gsi))
3113 gimple stmt = gsi_stmt (gsi);
3114 enum assignment_mod_result assign_result;
3115 bool modified = false, deleted = false;
3119 switch (gimple_code (stmt))
3122 t = gimple_return_retval_ptr (stmt);
3123 if (*t != NULL_TREE)
3124 modified |= sra_modify_expr (t, &gsi, false);
3128 assign_result = sra_modify_assign (&stmt, &gsi);
3129 modified |= assign_result == SRA_AM_MODIFIED;
3130 deleted = assign_result == SRA_AM_REMOVED;
3134 /* Operands must be processed before the lhs. */
3135 for (i = 0; i < gimple_call_num_args (stmt); i++)
3137 t = gimple_call_arg_ptr (stmt, i);
3138 modified |= sra_modify_expr (t, &gsi, false);
3141 if (gimple_call_lhs (stmt))
3143 t = gimple_call_lhs_ptr (stmt);
3144 modified |= sra_modify_expr (t, &gsi, true);
3149 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
3151 t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
3152 modified |= sra_modify_expr (t, &gsi, false);
3154 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
3156 t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
3157 modified |= sra_modify_expr (t, &gsi, true);
3168 if (maybe_clean_eh_stmt (stmt)
3169 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3180 /* Generate statements initializing scalar replacements of parts of function parameters. */
3184 initialize_parameter_reductions (void)
3186 gimple_stmt_iterator gsi;
3187 gimple_seq seq = NULL;
3190 for (parm = DECL_ARGUMENTS (current_function_decl);
3192 parm = DECL_CHAIN (parm))
3194 VEC (access_p, heap) *access_vec;
3195 struct access *access;
3197 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3199 access_vec = get_base_access_vector (parm);
3205 seq = gimple_seq_alloc ();
3206 gsi = gsi_start (seq);
3209 for (access = VEC_index (access_p, access_vec, 0);
3211 access = access->next_grp)
3212 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3213 EXPR_LOCATION (parm));
3217 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
3220 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
3221 it reveals there are components of some aggregates to be scalarized, it runs
3222 the required transformations. */
3224 perform_intra_sra (void)
3229 if (!find_var_candidates ())
3232 if (!scan_function ())
3235 if (!analyze_all_variable_accesses ())
3238 if (sra_modify_function_body ())
3239 ret = TODO_update_ssa | TODO_cleanup_cfg;
3241 ret = TODO_update_ssa;
3242 initialize_parameter_reductions ();
3244 statistics_counter_event (cfun, "Scalar replacements created",
3245 sra_stats.replacements);
3246 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3247 statistics_counter_event (cfun, "Subtree copy stmts",
3248 sra_stats.subtree_copies);
3249 statistics_counter_event (cfun, "Subreplacement stmts",
3250 sra_stats.subreplacements);
3251 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3252 statistics_counter_event (cfun, "Separate LHS and RHS handling",
3253 sra_stats.separate_lhs_rhs_handling);
3256 sra_deinitialize ();
3260 /* Perform early intraprocedural SRA. */
3262 early_intra_sra (void)
3264 sra_mode = SRA_MODE_EARLY_INTRA;
3265 return perform_intra_sra ();
3268 /* Perform "late" intraprocedural SRA. */
3270 late_intra_sra (void)
3272 sra_mode = SRA_MODE_INTRA;
3273 return perform_intra_sra ();
3278 gate_intra_sra (void)
3280 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
3284 struct gimple_opt_pass pass_sra_early =
3289 gate_intra_sra, /* gate */
3290 early_intra_sra, /* execute */
3293 0, /* static_pass_number */
3294 TV_TREE_SRA, /* tv_id */
3295 PROP_cfg | PROP_ssa, /* properties_required */
3296 0, /* properties_provided */
3297 0, /* properties_destroyed */
3298 0, /* todo_flags_start */
3301 | TODO_verify_ssa /* todo_flags_finish */
3305 struct gimple_opt_pass pass_sra =
3310 gate_intra_sra, /* gate */
3311 late_intra_sra, /* execute */
3314 0, /* static_pass_number */
3315 TV_TREE_SRA, /* tv_id */
3316 PROP_cfg | PROP_ssa, /* properties_required */
3317 0, /* properties_provided */
3318 0, /* properties_destroyed */
3319 TODO_update_address_taken, /* todo_flags_start */
3322 | TODO_verify_ssa /* todo_flags_finish */
3327 /* Return true iff PARM (which must be a PARM_DECL) is an unused scalar parameter. */
3331 is_unused_scalar_param (tree parm)
3334 return (is_gimple_reg (parm)
3335 && (!(name = gimple_default_def (cfun, parm))
3336 || has_zero_uses (name)));
3339 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3340 examine whether there are any direct or otherwise infeasible ones. If so,
3341 return true, otherwise return false. PARM must be a gimple register with a
3342 non-NULL default definition. */
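/* For example (a sketch): given a parameter struct S *p, a use such as
   tmp = p->i is a valid dereference, whereas passing p itself to another
   function, storing it into memory or comparing it against another pointer
   is a direct use that makes the parameter infeasible for IPA-SRA.  */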
3345 ptr_parm_has_direct_uses (tree parm)
3347 imm_use_iterator ui;
3349 tree name = gimple_default_def (cfun, parm);
3352 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3355 use_operand_p use_p;
3357 if (is_gimple_debug (stmt))
3360 /* Valid uses include dereferences on the lhs and the rhs. */
3361 if (gimple_has_lhs (stmt))
3363 tree lhs = gimple_get_lhs (stmt);
3364 while (handled_component_p (lhs))
3365 lhs = TREE_OPERAND (lhs, 0);
3366 if (TREE_CODE (lhs) == MEM_REF
3367 && TREE_OPERAND (lhs, 0) == name
3368 && integer_zerop (TREE_OPERAND (lhs, 1))
3369 && types_compatible_p (TREE_TYPE (lhs),
3370 TREE_TYPE (TREE_TYPE (name)))
3371 && !TREE_THIS_VOLATILE (lhs))
3374 if (gimple_assign_single_p (stmt))
3376 tree rhs = gimple_assign_rhs1 (stmt);
3377 while (handled_component_p (rhs))
3378 rhs = TREE_OPERAND (rhs, 0);
3379 if (TREE_CODE (rhs) == MEM_REF
3380 && TREE_OPERAND (rhs, 0) == name
3381 && integer_zerop (TREE_OPERAND (rhs, 1))
3382 && types_compatible_p (TREE_TYPE (rhs),
3383 TREE_TYPE (TREE_TYPE (name)))
3384 && !TREE_THIS_VOLATILE (rhs))
3387 else if (is_gimple_call (stmt))
3390 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3392 tree arg = gimple_call_arg (stmt, i);
3393 while (handled_component_p (arg))
3394 arg = TREE_OPERAND (arg, 0);
3395 if (TREE_CODE (arg) == MEM_REF
3396 && TREE_OPERAND (arg, 0) == name
3397 && integer_zerop (TREE_OPERAND (arg, 1))
3398 && types_compatible_p (TREE_TYPE (arg),
3399 TREE_TYPE (TREE_TYPE (name)))
3400 && !TREE_THIS_VOLATILE (arg))
3405 /* If the number of valid uses does not match the number of
3406 uses in this stmt there is an unhandled use. */
3407 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3414 BREAK_FROM_IMM_USE_STMT (ui);
3420 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3421 them in candidate_bitmap. Note that these do not necessarily include
3422 parameters which are unused and thus can be removed. Return true iff any
3423 such candidate has been found. */
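/* An illustrative sketch (the transformed signature is hypothetical): in

     int f (struct S s, struct S *p) { return s.i + p->i; }

   both s, an aggregate passed by value, and p, a pointer with only
   dereference uses, are candidates, and IPA-SRA may eventually turn the
   function into something like int f (int s_i, int p_i).  */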
3426 find_param_candidates (void)
3433 for (parm = DECL_ARGUMENTS (current_function_decl);
3435 parm = DECL_CHAIN (parm))
3437 tree type = TREE_TYPE (parm);
3441 if (TREE_THIS_VOLATILE (parm)
3442 || TREE_ADDRESSABLE (parm)
3443 || (!is_gimple_reg_type (type) && is_va_list_type (type)))
3446 if (is_unused_scalar_param (parm))
3452 if (POINTER_TYPE_P (type))
3454 type = TREE_TYPE (type);
3456 if (TREE_CODE (type) == FUNCTION_TYPE
3457 || TYPE_VOLATILE (type)
3458 || (TREE_CODE (type) == ARRAY_TYPE
3459 && TYPE_NONALIASED_COMPONENT (type))
3460 || !is_gimple_reg (parm)
3461 || is_va_list_type (type)
3462 || ptr_parm_has_direct_uses (parm))
3465 else if (!AGGREGATE_TYPE_P (type))
3468 if (!COMPLETE_TYPE_P (type)
3469 || !host_integerp (TYPE_SIZE (type), 1)
3470 || tree_low_cst (TYPE_SIZE (type), 1) == 0
3471 || (AGGREGATE_TYPE_P (type)
3472 && type_internals_preclude_sra_p (type, &msg)))
3475 bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
3477 if (dump_file && (dump_flags & TDF_DETAILS))
3479 fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
3480 print_generic_expr (dump_file, parm, 0);
3481 fprintf (dump_file, "\n");
3485 func_param_count = count;
3489 /* Callback of walk_aliased_vdefs; marks the access passed as DATA as maybe_modified. */
3493 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
3496 struct access *repr = (struct access *) data;
3498 repr->grp_maybe_modified = 1;
3502 /* Analyze what representatives (in linked lists accessible from
3503 REPRESENTATIVES) can be modified by side effects of statements in the
3504 current function. */
3507 analyze_modified_params (VEC (access_p, heap) *representatives)
3511 for (i = 0; i < func_param_count; i++)
3513 struct access *repr;
3515 for (repr = VEC_index (access_p, representatives, i);
3517 repr = repr->next_grp)
3519 struct access *access;