/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */
#include "coretypes.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "ipa-reference.h"
/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may, or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 5 of these routines, each representing one of the
   5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
   Virtual Must Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 5 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
   vector for VUSE, then the new vector will also be modified such that
   it contains 'a_5' rather than 'a'.  */
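
/* As an illustrative sketch (not part of the implementation): for a
   statement such as

	a = b + c;

   where 'a' is call-clobbered, the cache holds a real DEF for 'a' (a
   pointer to TREE_OPERAND (stmt, 0)), real USEs for 'b' and 'c', and,
   once aliasing is computed, whatever virtual operands 'a' requires.
   Passes then walk the cache with the iterators from
   tree-ssa-operands.h, e.g.:

	tree var;
	ssa_op_iter iter;

	FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_ALL_USES)
	  print_generic_expr (stderr, var, 0);  */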
/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_none	0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_is_def	(1 << 0)

/* Operand is the target of an assignment expression.  */
#define opf_kill_def	(1 << 1)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops	(1 << 2)

/* Operand is a "non-specific" kill for call-clobbers and such.  This
   is used to distinguish "reset the world" events from explicit
   MODIFY_EXPRs.  */
#define opf_non_specific  (1 << 3)
/* Array for building all the def operands.  */
static VEC(tree,heap) *build_defs;

/* Array for building all the use operands.  */
static VEC(tree,heap) *build_uses;

/* Array for building all the V_MAY_DEF operands.  */
static VEC(tree,heap) *build_v_may_defs;

/* Array for building all the VUSE operands.  */
static VEC(tree,heap) *build_vuses;

/* Array for building all the V_MUST_DEF operands.  */
static VEC(tree,heap) *build_v_must_defs;

/* True if the SSA operand machinery is initialized and accepting
   operand requests.  */
static bool ops_active = false;

static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
static unsigned operand_memory_index;

static void get_expr_operands (tree, tree *, int);

static def_optype_p free_defs = NULL;
static use_optype_p free_uses = NULL;
static vuse_optype_p free_vuses = NULL;
static maydef_optype_p free_maydefs = NULL;
static mustdef_optype_p free_mustdefs = NULL;
/* Return the DECL_UID of the base variable of T.  */

static inline unsigned
get_name_decl (tree t)
{
  if (TREE_CODE (t) != SSA_NAME)
    return DECL_UID (t);
  else
    return DECL_UID (SSA_NAME_VAR (t));
}


/* Comparison function for qsort used in operand_build_sort_virtual.  */

static int
operand_build_cmp (const void *p, const void *q)
{
  tree e1 = *((const tree *) p);
  tree e2 = *((const tree *) q);
  unsigned int u1, u2;

  u1 = get_name_decl (e1);
  u2 = get_name_decl (e2);

  /* We want to sort in ascending order.  They can never be equal.  */
#ifdef ENABLE_CHECKING
  gcc_assert (u1 != u2);
#endif
  return (u1 > u2 ? 1 : -1);
}
/* Sort the virtual operands in LIST from lowest DECL_UID to highest.  */

static void
operand_build_sort_virtual (VEC(tree,heap) *list)
{
  int num = VEC_length (tree, list);

  if (num < 2)
    return;

  if (num == 2)
    {
      if (get_name_decl (VEC_index (tree, list, 0))
	  > get_name_decl (VEC_index (tree, list, 1)))
	{
	  /* Swap elements if in the wrong order.  */
	  tree tmp = VEC_index (tree, list, 0);
	  VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
	  VEC_replace (tree, list, 1, tmp);
	}
      return;
    }

  /* There are 3 or more elements, call qsort.  */
  qsort (VEC_address (tree, list), VEC_length (tree, list),
	 sizeof (tree), operand_build_cmp);
}
/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (void)
{
  return ops_active;
}
/* Structure storing statistics on how many call clobbers we have, and
   how many were avoided.  */

static struct
{
  /* Number of call-clobbered ops we attempt to add to calls in
     add_call_clobber_ops.  */
  unsigned int clobbered_vars;

  /* Number of write-clobbers (V_MAY_DEFs) avoided by using
     not_written information.  */
  unsigned int static_write_clobbers_avoided;

  /* Number of reads (VUSEs) avoided by using not_read information.  */
  unsigned int static_read_clobbers_avoided;

  /* Number of write-clobbers avoided because the variable can't escape to
     this call.  */
  unsigned int unescapable_clobbers_avoided;

  /* Number of read-only uses we attempt to add to calls in
     add_call_read_ops.  */
  unsigned int readonly_clobbers;

  /* Number of read-only uses we avoid using not_read information.  */
  unsigned int static_readonly_clobbers_avoided;
} clobber_stats;
/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  build_defs = VEC_alloc (tree, heap, 5);
  build_uses = VEC_alloc (tree, heap, 10);
  build_vuses = VEC_alloc (tree, heap, 25);
  build_v_may_defs = VEC_alloc (tree, heap, 25);
  build_v_must_defs = VEC_alloc (tree, heap, 25);

  gcc_assert (operand_memory == NULL);
  operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
  ops_active = true;
  memset (&clobber_stats, 0, sizeof (clobber_stats));
}
/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;
  VEC_free (tree, heap, build_defs);
  VEC_free (tree, heap, build_uses);
  VEC_free (tree, heap, build_v_must_defs);
  VEC_free (tree, heap, build_v_may_defs);
  VEC_free (tree, heap, build_vuses);
  free_defs = NULL;
  free_uses = NULL;
  free_vuses = NULL;
  free_maydefs = NULL;
  free_mustdefs = NULL;
  while ((ptr = operand_memory) != NULL)
    {
      operand_memory = operand_memory->next;
      ggc_free (ptr);
    }

  ops_active = false;

  if (dump_file && (dump_flags & TDF_STATS))
    {
      fprintf (dump_file, "Original clobbered vars:%d\n",
	       clobber_stats.clobbered_vars);
      fprintf (dump_file, "Static write clobbers avoided:%d\n",
	       clobber_stats.static_write_clobbers_avoided);
      fprintf (dump_file, "Static read clobbers avoided:%d\n",
	       clobber_stats.static_read_clobbers_avoided);
      fprintf (dump_file, "Unescapable clobbers avoided:%d\n",
	       clobber_stats.unescapable_clobbers_avoided);
      fprintf (dump_file, "Original read-only clobbers:%d\n",
	       clobber_stats.readonly_clobbers);
      fprintf (dump_file, "Static read-only clobbers avoided:%d\n",
	       clobber_stats.static_readonly_clobbers_avoided);
    }
}
/* Return memory for operands of SIZE chunks.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;
  if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
    {
      struct ssa_operand_memory_d *ptr;
      ptr = GGC_NEW (struct ssa_operand_memory_d);
      ptr->next = operand_memory;
      operand_memory = ptr;
      operand_memory_index = 0;
    }
  ptr = &(operand_memory->mem[operand_memory_index]);
  operand_memory_index += size;
  return ptr;
}
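
/* A minimal usage sketch: this is what the finalize routines generated
   from tree-ssa-opfinalize.h effectively do when their free list is
   empty.  The node type and its 'next' field are those declared in
   tree-ssa-operands.h:

	def_optype_p ptr;

	ptr = (def_optype_p) ssa_operand_alloc (sizeof (struct def_optype_d));
	ptr->next = NULL;  */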
/* Make sure PTR is in the correct immediate use list.  Since uses are simply
   pointers into the stmt TREE, there is no way of telling if anyone has
   changed what this pointer points to via TREE_OPERANDS (exp, 0) = <...>.
   The contents are different, but the pointer is still the same.  This
   routine will check to make sure PTR is in the correct list, and if it
   isn't, put it in the correct list.  We cannot simply check the previous
   node because all nodes in the same stmt might have been changed.  */

static void
correct_use_link (use_operand_p ptr, tree stmt)
{
  use_operand_p prev;
  tree root;

  /* fold_stmt may have changed the stmt pointers.  */
  if (ptr->stmt != stmt)
    ptr->stmt = stmt;

  prev = ptr->prev;
  if (prev)
    {
      /* Find the root element, making sure we skip any safe iterators.  */
      while (prev->use != NULL || prev->stmt == NULL)
	prev = prev->prev;

      /* Get the SSA_NAME of the list the node is in.  */
      root = prev->stmt;

      /* If it's the right list, simply return.  */
      if (root == *(ptr->use))
	return;
    }

  /* It is in the wrong list if we reach here.  */
  delink_imm_use (ptr);
  link_imm_use (ptr, *(ptr->use));
}
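
/* To illustrate the problem correct_use_link solves (a sketch, not extra
   semantics): after a pass rewrites an operand slot in place,

	TREE_OPERAND (stmt, 1) = other_name;

   the cached use_operand_p still points at the same slot, but the slot
   now holds a different SSA_NAME, so the node must be moved from the old
   name's immediate-use list onto other_name's list.  The routine above
   detects this by walking back to the list root and relinks the node when
   the root does not match *(ptr->use).  */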
/* This routine makes sure that PTR is in an immediate use list, and makes
   sure the stmt pointer is set to the current stmt.  Virtual uses do not
   need the overhead of correct_use_link since they cannot be directly
   manipulated like a real use can be.  (They don't exist in the
   TREE_OPERAND nodes.)  */

static inline void
set_virtual_use_link (use_operand_p ptr, tree stmt)
{
  /* fold_stmt may have changed the stmt pointers.  */
  if (ptr->stmt != stmt)
    ptr->stmt = stmt;

  /* If this use isn't in a list, add it to the correct list.  */
  if (!ptr->prev)
    link_imm_use (ptr, *(ptr->use));
}
#define FINALIZE_OPBUILD build_defs
#define FINALIZE_OPBUILD_BASE(I)	(tree *)VEC_index (tree,	\
							   build_defs, (I))
#define FINALIZE_OPBUILD_ELEM(I)	(tree *)VEC_index (tree,	\
							   build_defs, (I))
#define FINALIZE_FUNC	finalize_ssa_def_ops
#define FINALIZE_ALLOC	alloc_def
#define FINALIZE_FREE	free_defs
#define FINALIZE_TYPE	struct def_optype_d
#define FINALIZE_ELEM(PTR)	((PTR)->def_ptr)
#define FINALIZE_OPS	DEF_OPS
#define FINALIZE_BASE(VAR)	VAR
#define FINALIZE_BASE_TYPE	tree *
#define FINALIZE_BASE_ZERO	NULL
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)	FINALIZE_ELEM (PTR) = (VAL)
#include "tree-ssa-opfinalize.h"
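
/* The #include just above is a poor man's template instantiation:
   tree-ssa-opfinalize.h expands a generic finalize routine in terms of
   the FINALIZE_* macros defined beforehand, then #undefs them so the
   header can be included once per operand type.  A minimal sketch of the
   same technique, with hypothetical names (this is not the actual
   contents of tree-ssa-opfinalize.h):

	#define GEN_FUNC print_int
	#define GEN_TYPE int
	#include "gen-print.h"

   where gen-print.h contains

	static void
	GEN_FUNC (GEN_TYPE x)
	{
	  printf ("%d\n", x);
	}
	#undef GEN_FUNC
	#undef GEN_TYPE  */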
/* This routine will create stmt operands for STMT from the def build list.  */

static void
finalize_ssa_defs (tree stmt)
{
  unsigned int num = VEC_length (tree, build_defs);

  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);

  /* If there is an old list, often the new list is identical, or close, so
     find the elements at the beginning that are the same as the vector.  */
  finalize_ssa_def_ops (stmt);
  VEC_truncate (tree, build_defs, 0);
}
#define FINALIZE_OPBUILD build_uses
#define FINALIZE_OPBUILD_BASE(I)	(tree *)VEC_index (tree,	\
							   build_uses, (I))
#define FINALIZE_OPBUILD_ELEM(I)	(tree *)VEC_index (tree,	\
							   build_uses, (I))
#define FINALIZE_FUNC	finalize_ssa_use_ops
#define FINALIZE_ALLOC	alloc_use
#define FINALIZE_FREE	free_uses
#define FINALIZE_TYPE	struct use_optype_d
#define FINALIZE_ELEM(PTR)	((PTR)->use_ptr.use)
#define FINALIZE_OPS	USE_OPS
#define FINALIZE_USE_PTR(PTR)	USE_OP_PTR (PTR)
#define FINALIZE_CORRECT_USE correct_use_link
#define FINALIZE_BASE(VAR)	VAR
#define FINALIZE_BASE_TYPE	tree *
#define FINALIZE_BASE_ZERO	NULL
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)				\
				(PTR)->use_ptr.use = (VAL);		\
				link_imm_use_stmt (&((PTR)->use_ptr),	\
						   *(VAL), (STMT))
#include "tree-ssa-opfinalize.h"
/* Finalize the new use operands for STMT, reusing the previous vector
   when possible.  */

static void
finalize_ssa_uses (tree stmt)
{
#ifdef ENABLE_CHECKING
  {
    unsigned x;
    unsigned num = VEC_length (tree, build_uses);

    /* If the pointer to the operand is the statement itself, something is
       wrong.  It means that we are pointing to a local variable (the
       initial call to update_stmt_operands does not pass a pointer to a
       local variable).  */
    for (x = 0; x < num; x++)
      gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
  }
#endif
  finalize_ssa_use_ops (stmt);
  VEC_truncate (tree, build_uses, 0);
}
/* Finalization for the V_MAY_DEF operands of STMT.  */

#define FINALIZE_OPBUILD build_v_may_defs
#define FINALIZE_OPBUILD_ELEM(I)	VEC_index (tree, build_v_may_defs, (I))
#define FINALIZE_OPBUILD_BASE(I)	get_name_decl (VEC_index (tree,	\
							build_v_may_defs, (I)))
#define FINALIZE_FUNC	finalize_ssa_v_may_def_ops
#define FINALIZE_ALLOC	alloc_maydef
#define FINALIZE_FREE	free_maydefs
#define FINALIZE_TYPE	struct maydef_optype_d
#define FINALIZE_ELEM(PTR)	MAYDEF_RESULT (PTR)
#define FINALIZE_OPS	MAYDEF_OPS
#define FINALIZE_USE_PTR(PTR)	MAYDEF_OP_PTR (PTR)
#define FINALIZE_CORRECT_USE set_virtual_use_link
#define FINALIZE_BASE_ZERO	0
#define FINALIZE_BASE(VAR)	get_name_decl (VAR)
#define FINALIZE_BASE_TYPE	unsigned
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)				\
				(PTR)->def_var = (VAL);			\
				(PTR)->use_var = (VAL);			\
				(PTR)->use_ptr.use = &((PTR)->use_var);	\
				link_imm_use_stmt (&((PTR)->use_ptr),	\
						   (VAL), (STMT))
#include "tree-ssa-opfinalize.h"
static void
finalize_ssa_v_may_defs (tree stmt)
{
  finalize_ssa_v_may_def_ops (stmt);
}


/* Clear the in_list bits and empty the build array for V_MAY_DEFs.  */

static inline void
cleanup_v_may_defs (void)
{
  unsigned x, num;
  num = VEC_length (tree, build_v_may_defs);

  for (x = 0; x < num; x++)
    {
      tree t = VEC_index (tree, build_v_may_defs, x);
      if (TREE_CODE (t) != SSA_NAME)
	{
	  var_ann_t ann = var_ann (t);
	  ann->in_v_may_def_list = 0;
	}
    }
  VEC_truncate (tree, build_v_may_defs, 0);
}
#define FINALIZE_OPBUILD build_vuses
#define FINALIZE_OPBUILD_ELEM(I)	VEC_index (tree, build_vuses, (I))
#define FINALIZE_OPBUILD_BASE(I)	get_name_decl (VEC_index (tree,	\
							build_vuses, (I)))
#define FINALIZE_FUNC	finalize_ssa_vuse_ops
#define FINALIZE_ALLOC	alloc_vuse
#define FINALIZE_FREE	free_vuses
#define FINALIZE_TYPE	struct vuse_optype_d
#define FINALIZE_ELEM(PTR)	VUSE_OP (PTR)
#define FINALIZE_OPS	VUSE_OPS
#define FINALIZE_USE_PTR(PTR)	VUSE_OP_PTR (PTR)
#define FINALIZE_CORRECT_USE set_virtual_use_link
#define FINALIZE_BASE_ZERO	0
#define FINALIZE_BASE(VAR)	get_name_decl (VAR)
#define FINALIZE_BASE_TYPE	unsigned
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)				\
				(PTR)->use_var = (VAL);			\
				(PTR)->use_ptr.use = &((PTR)->use_var);	\
				link_imm_use_stmt (&((PTR)->use_ptr),	\
						   (VAL), (STMT))
#include "tree-ssa-opfinalize.h"
/* Finalize the VUSE operands for STMT, removing any that are made
   superfluous by a V_MAY_DEF of the same variable.  */

static void
finalize_ssa_vuses (tree stmt)
{
  unsigned num, num_v_may_defs;
  unsigned vuse_index;

  /* Remove superfluous VUSE operands.  If the statement already has a
     V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is
     not needed because V_MAY_DEFs imply a VUSE of the variable.  For
     instance, suppose that variable 'a' is aliased:

	      # VUSE <a_2>
	      # a_3 = V_MAY_DEF <a_2>
	      a = a + 1;

     The VUSE <a_2> is superfluous because it is implied by the
     V_MAY_DEF operation.  */
  num = VEC_length (tree, build_vuses);
  num_v_may_defs = VEC_length (tree, build_v_may_defs);

  if (num > 0 && num_v_may_defs > 0)
    {
      for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
	{
	  tree vuse;
	  vuse = VEC_index (tree, build_vuses, vuse_index);
	  if (TREE_CODE (vuse) != SSA_NAME)
	    {
	      var_ann_t ann = var_ann (vuse);
	      ann->in_vuse_list = 0;
	      if (ann->in_v_may_def_list)
		{
		  VEC_ordered_remove (tree, build_vuses, vuse_index);
		  continue;
		}
	    }
	  vuse_index++;
	}
    }
  else
    {
      /* Clear out the in_list bits.  */
      for (vuse_index = 0;
	   vuse_index < VEC_length (tree, build_vuses);
	   vuse_index++)
	{
	  tree t = VEC_index (tree, build_vuses, vuse_index);
	  if (TREE_CODE (t) != SSA_NAME)
	    {
	      var_ann_t ann = var_ann (t);
	      ann->in_vuse_list = 0;
	    }
	}
    }

  finalize_ssa_vuse_ops (stmt);

  /* The V_MAY_DEF build vector wasn't cleaned up because we needed it.  */
  cleanup_v_may_defs ();

  /* Free the VUSEs build vector.  */
  VEC_truncate (tree, build_vuses, 0);
}
/* Finalization for the V_MUST_DEF operands of STMT.  */

#define FINALIZE_OPBUILD build_v_must_defs
#define FINALIZE_OPBUILD_ELEM(I)	VEC_index (tree, build_v_must_defs, (I))
#define FINALIZE_OPBUILD_BASE(I)	get_name_decl (VEC_index (tree,	\
							build_v_must_defs, (I)))
#define FINALIZE_FUNC	finalize_ssa_v_must_def_ops
#define FINALIZE_ALLOC	alloc_mustdef
#define FINALIZE_FREE	free_mustdefs
#define FINALIZE_TYPE	struct mustdef_optype_d
#define FINALIZE_ELEM(PTR)	MUSTDEF_RESULT (PTR)
#define FINALIZE_OPS	MUSTDEF_OPS
#define FINALIZE_USE_PTR(PTR)	MUSTDEF_KILL_PTR (PTR)
#define FINALIZE_CORRECT_USE set_virtual_use_link
#define FINALIZE_BASE_ZERO	0
#define FINALIZE_BASE(VAR)	get_name_decl (VAR)
#define FINALIZE_BASE_TYPE	unsigned
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)				\
				(PTR)->def_var = (VAL);			\
				(PTR)->kill_var = (VAL);		\
				(PTR)->use_ptr.use = &((PTR)->kill_var);\
				link_imm_use_stmt (&((PTR)->use_ptr),	\
						   (VAL), (STMT))
#include "tree-ssa-opfinalize.h"
static void
finalize_ssa_v_must_defs (tree stmt)
{
  /* In the presence of subvars, there may be more than one V_MUST_DEF
     per statement (one for each subvar).  It is a bit expensive to
     verify that all must-defs in a statement belong to subvars if
     there is more than one must-def, so we don't do it.  Suffice to
     say, if you reach here without having subvars, and have num > 1,
     you have hit a bug.  */
  finalize_ssa_v_must_def_ops (stmt);
  VEC_truncate (tree, build_v_must_defs, 0);
}
/* Finalize all the build vectors, fill the new ones into INFO.  */

static inline void
finalize_ssa_stmt_operands (tree stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  finalize_ssa_v_must_defs (stmt);
  finalize_ssa_v_may_defs (stmt);
  finalize_ssa_vuses (stmt);
}


/* Start the process of building up operands vectors in INFO.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (VEC_length (tree, build_defs) == 0);
  gcc_assert (VEC_length (tree, build_uses) == 0);
  gcc_assert (VEC_length (tree, build_vuses) == 0);
  gcc_assert (VEC_length (tree, build_v_may_defs) == 0);
  gcc_assert (VEC_length (tree, build_v_must_defs) == 0);
}
/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  VEC_safe_push (tree, heap, build_defs, (tree) def_p);
}


/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  VEC_safe_push (tree, heap, build_uses, (tree) use_p);
}


/* Add a new virtual may def for variable VAR to the build array.  */

static inline void
append_v_may_def (tree var)
{
  if (TREE_CODE (var) != SSA_NAME)
    {
      var_ann_t ann = get_var_ann (var);

      /* Don't allow duplicate entries.  */
      if (ann->in_v_may_def_list)
	return;
      ann->in_v_may_def_list = 1;
    }

  VEC_safe_push (tree, heap, build_v_may_defs, (tree) var);
}


/* Add VAR to the list of virtual uses.  */

static inline void
append_vuse (tree var)
{
  /* Don't allow duplicate entries.  */
  if (TREE_CODE (var) != SSA_NAME)
    {
      var_ann_t ann = get_var_ann (var);

      if (ann->in_vuse_list || ann->in_v_may_def_list)
	return;
      ann->in_vuse_list = 1;
    }

  VEC_safe_push (tree, heap, build_vuses, (tree) var);
}


/* Add VAR to the list of virtual must definitions for INFO.  */

static inline void
append_v_must_def (tree var)
{
  unsigned i;

  /* Don't allow duplicate entries.  */
  for (i = 0; i < VEC_length (tree, build_v_must_defs); i++)
    if (var == VEC_index (tree, build_v_must_defs, i))
      return;

  VEC_safe_push (tree, heap, build_v_must_defs, (tree) var);
}
/* REF is a tree that contains the entire pointer dereference
   expression, if available, or NULL otherwise.  ALIAS is the variable
   we are asking if REF can access.  OFFSET and SIZE come from the
   memory access expression that generated this virtual operand.  */

static bool
access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
			   HOST_WIDE_INT size)
{
  bool offsetgtz = offset > 0;
  unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
  tree base = ref ? get_base_address (ref) : NULL;
  /* If ALIAS is an SFT, it can't be touched if the offset
     and size of the access is not overlapping with the SFT offset and
     size.  This is only true if we are accessing through a pointer
     to a type that is the same as SFT_PARENT_VAR.  Otherwise, we may
     be accessing through a pointer to some substruct of the
     structure, and if we try to prune there, we will have the wrong
     offset, and get the wrong answer.
     i.e., we can't prune without more work if we have something like

     struct gcc_target
     {
       struct asm_out
       {
	 struct asm_int_op
	 {
	   const char *hi;
	 } aligned_op;
       } asm_out;
     } targetm;

     foo = &targetm.asm_out.aligned_op;
     return foo->hi;

     SFT.1, which represents hi, will have SFT_OFFSET=32 because in
     terms of SFT_PARENT_VAR, that is where it is.
     However, the access through the foo pointer will be at offset 0.  */

  if (ref
      && TREE_CODE (alias) == STRUCT_FIELD_TAG
      && base
      && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias))
      && !overlap_subvar (offset, size, alias, NULL))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }
  /* Without strict aliasing, it is impossible for a component access
     through a pointer to touch a random variable, unless that
     variable *is* a structure or a pointer.

     That is, given p->c, and some random global variable b,
     there is no legal way that p->c could be an access to b.

     Without strict aliasing on, we consider it legal to do something
     like:

     struct foos { int l; };
     int foo;
     static struct foos *getfoo(void);
     int main (void)
     {
       struct foos *f = getfoo();
       f->l = 1;
       foo = 2;
       if (f->l == 1)
	 abort();
       exit(0);
     }
     static struct foos *getfoo(void)
     { return (struct foos *)&foo; }

     (taken from 20000623-1.c)  */

  else if (ref
	   && flag_strict_aliasing
	   && TREE_CODE (ref) != INDIRECT_REF
	   && !MTAG_P (alias)
	   && !AGGREGATE_TYPE_P (TREE_TYPE (alias))
	   && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE
	   && !POINTER_TYPE_P (TREE_TYPE (alias)))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }
  /* If the offset of the access is greater than the size of one of
     the possible aliases, it can't be touching that alias, because it
     would be past the end of the structure.  */

  else if (ref
	   && flag_strict_aliasing
	   && TREE_CODE (ref) != INDIRECT_REF
	   && !MTAG_P (alias)
	   && !POINTER_TYPE_P (TREE_TYPE (alias))
	   && offsetgtz
	   && DECL_SIZE (alias)
	   && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
	   && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias)))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  return true;
}
/* Add VAR to the virtual operands array.  FLAGS is as in
   get_expr_operands.  FULL_REF is a tree that contains the entire
   pointer dereference expression, if available, or NULL otherwise.
   OFFSET and SIZE come from the memory access expression that
   generated this virtual operand.  FOR_CLOBBER is true if this is
   adding a virtual operand for a call clobber.  */

static void
add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
		     tree full_ref, HOST_WIDE_INT offset,
		     HOST_WIDE_INT size, bool for_clobber)
{
  VEC(tree,gc) *aliases;
  tree sym;
  var_ann_t v_ann;

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark statements with volatile operands.  Optimizers should back
     off from statements having volatile operands.  */
  if (TREE_THIS_VOLATILE (sym) && s_ann)
    s_ann->has_volatile_ops = true;
  /* If the variable cannot be modified and this is a V_MAY_DEF change
     it into a VUSE.  This happens when read-only variables are marked
     call-clobbered and/or aliased to writable variables.  We check this
     only for non-specific stores.

     Note that if this is a specific store, i.e. associated with a
     MODIFY_EXPR, then we can't suppress the V_MAY_DEF, lest we run
     into validation problems.

     This can happen when programs cast away const, leaving us with a
     store to read-only memory.  If the statement is actually executed
     at runtime, then the program is ill formed.  If the statement is
     not executed then all is well.  At the very least, we cannot ICE.  */
  if ((flags & opf_non_specific) && unmodifiable_var_p (var))
    flags &= ~(opf_is_def | opf_kill_def);

  /* The variable is not a GIMPLE register.  Add it (or its aliases) to
     virtual operands, unless the caller has specifically requested
     not to add virtual operands (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  aliases = v_ann->may_aliases;
  if (aliases == NULL)
    {
      /* The variable is not aliased or it is an alias tag.  */
      if (flags & opf_is_def)
	{
	  if (flags & opf_kill_def)
	    {
	      /* V_MUST_DEF for non-aliased, non-GIMPLE register
		 variable definitions.  */
	      gcc_assert (!MTAG_P (var)
			  || TREE_CODE (var) == STRUCT_FIELD_TAG);
	      append_v_must_def (var);
	    }
	  else
	    {
	      /* Add a V_MAY_DEF for call-clobbered variables and
		 memory tags.  */
	      append_v_may_def (var);
	    }
	}
      else
	append_vuse (var);
    }
  else
    {
      unsigned i;
      tree al;

      /* The variable is aliased.  Add its aliases to the virtual
	 operands.  */
      gcc_assert (VEC_length (tree, aliases) != 0);

      if (flags & opf_is_def)
	{
	  bool none_added = true;

	  for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
	    {
	      if (!access_can_touch_variable (full_ref, al, offset, size))
		continue;
	      none_added = false;
	      append_v_may_def (al);
	    }

	  /* If the variable is also an alias tag, add a virtual
	     operand for it, otherwise we will miss representing
	     references to the members of the variable's alias set.
	     This fixes the bug in gcc.c-torture/execute/20020503-1.c.

	     It is also necessary to add bare defs on clobbers for
	     SMT's, so that bare SMT uses caused by pruning all the
	     aliases will link up properly with calls.   In order to
	     keep the number of these bare defs we add down to the
	     minimum necessary, we keep track of which SMT's were used
	     alone in statement vdefs or VUSEs.  */
	  if (v_ann->is_aliased
	      || none_added
	      || (TREE_CODE (var) == SYMBOL_MEMORY_TAG
		  && for_clobber
		  && SMT_USED_ALONE (var)))
	    {
	      /* Every bare SMT def we add should have SMT_USED_ALONE
		 set on it, or else we will get the wrong answer on
		 clobbers.  */
	      if (none_added
		  && !updating_used_alone && aliases_computed_p
		  && TREE_CODE (var) == SYMBOL_MEMORY_TAG)
		gcc_assert (SMT_USED_ALONE (var));

	      append_v_may_def (var);
	    }
	}
      else
	{
	  bool none_added = true;
	  for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
	    {
	      if (!access_can_touch_variable (full_ref, al, offset, size))
		continue;
	      none_added = false;
	      append_vuse (al);
	    }

	  /* Similarly, append a virtual use for VAR itself, when
	     it is an alias tag.  */
	  if (v_ann->is_aliased || none_added)
	    append_vuse (var);
	}
    }
}
/* Add *VAR_P to the appropriate operand array for S_ANN.  FLAGS is as in
   get_expr_operands.  If *VAR_P is a GIMPLE register, it will be added to
   the statement's real operands, otherwise it is added to virtual
   operands.  */

static void
add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
{
  bool is_real_op;
  tree var, sym;
  var_ann_t v_ann;

  var = *var_p;
  gcc_assert (SSA_VAR_P (var));

  is_real_op = is_gimple_reg (var);

  /* If this is a real operand, the operand is either an SSA name or a
     decl.  Virtual operands may only be decls.  */
  gcc_assert (is_real_op || DECL_P (var));

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark statements with volatile operands.  Optimizers should back
     off from statements having volatile operands.  */
  if (TREE_THIS_VOLATILE (sym) && s_ann)
    s_ann->has_volatile_ops = true;

  if (is_real_op)
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_is_def)
	append_def (var_p);
      else
	append_use (var_p);
    }
  else
    add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false);
}
/* A subroutine of get_expr_operands to handle INDIRECT_REF,
   ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.

   STMT is the statement being processed, EXPR is the INDIRECT_REF
   that got us here.

   FLAGS is as in get_expr_operands.

   FULL_REF contains the full pointer dereference expression, if we
   have it, or NULL otherwise.

   OFFSET and SIZE are the location of the access inside the
   dereferenced pointer, if known.

   RECURSE_ON_BASE should be set to true if we want to continue
   calling get_expr_operands on the base pointer, and false if
   something else will do it for us.  */
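
/* For instance (an illustrative sketch): for a store such as

	*p_1 = x_2;

   where p_1 has flow-sensitive points-to information with name memory
   tag NMT.3, the code below adds a virtual operand for NMT.3 (always a
   V_MAY_DEF, since opf_kill_def is stripped for INDIRECT_REF stores),
   and the recursion on the base adds a real USE of p_1.  */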
static void
get_indirect_ref_operands (tree stmt, tree expr, int flags, tree full_ref,
			   HOST_WIDE_INT offset, HOST_WIDE_INT size,
			   bool recurse_on_base)
{
  tree *pptr = &TREE_OPERAND (expr, 0);
  tree ptr = *pptr;
  stmt_ann_t s_ann = stmt_ann (stmt);

  /* Stores into INDIRECT_REF operands are never killing definitions.  */
  flags &= ~opf_kill_def;

  if (SSA_VAR_P (ptr))
    {
      struct ptr_info_def *pi = NULL;

      /* If PTR has flow-sensitive points-to information, use it.  */
      if (TREE_CODE (ptr) == SSA_NAME
	  && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
	  && pi->name_mem_tag)
	{
	  /* PTR has its own memory tag.  Use it.  */
	  add_virtual_operand (pi->name_mem_tag, s_ann, flags,
			       full_ref, offset, size, false);
	}
      else
	{
	  /* If PTR is not an SSA_NAME or it doesn't have a name
	     tag, use its symbol memory tag.  */
	  var_ann_t v_ann;

	  /* If we are emitting debugging dumps, display a warning if
	     PTR is an SSA_NAME with no flow-sensitive alias
	     information.  That means that we may need to compute
	     aliasing again.  */
	  if (dump_file
	      && TREE_CODE (ptr) == SSA_NAME
	      && pi == NULL)
	    {
	      fprintf (dump_file,
		       "NOTE: no flow-sensitive alias info for ");
	      print_generic_expr (dump_file, ptr, dump_flags);
	      fprintf (dump_file, " in ");
	      print_generic_stmt (dump_file, stmt, dump_flags);
	    }

	  if (TREE_CODE (ptr) == SSA_NAME)
	    ptr = SSA_NAME_VAR (ptr);
	  v_ann = var_ann (ptr);

	  if (v_ann->symbol_mem_tag)
	    add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
				 full_ref, offset, size, false);
	}
    }
  else if (TREE_CODE (ptr) == INTEGER_CST)
    {
      /* If a constant is used as a pointer, we can't generate a real
	 operand for it but we mark the statement volatile to prevent
	 optimizations from messing things up.  */
      if (s_ann)
	s_ann->has_volatile_ops = true;
      return;
    }
  else
    {
      /* Ok, this isn't even is_gimple_min_invariant.  Something's broke.  */
      gcc_unreachable ();
    }

  /* If requested, add a USE operand for the base pointer.  */
  if (recurse_on_base)
    get_expr_operands (stmt, pptr, opf_none);
}
/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (tree stmt, tree expr, int flags)
{
  tree tag = TMR_TAG (expr), ref;
  HOST_WIDE_INT offset, size, maxsize;
  subvar_t svars, sv;
  stmt_ann_t s_ann = stmt_ann (stmt);

  /* First record the real operands.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);

  /* MEM_REFs should never be killing.  */
  flags &= ~opf_kill_def;

  if (TMR_SYMBOL (expr))
    {
      stmt_ann_t ann = stmt_ann (stmt);
      add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
    }

  if (!tag)
    {
      /* Something weird, so ensure that we will be careful.  */
      stmt_ann (stmt)->has_volatile_ops = true;
      return;
    }

  if (DECL_P (tag))
    {
      get_expr_operands (stmt, &tag, flags);
      return;
    }

  ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
  gcc_assert (ref != NULL_TREE);
  svars = get_subvars_for_var (ref);
  for (sv = svars; sv; sv = sv->next)
    {
      bool exact;
      if (overlap_subvar (offset, maxsize, sv->var, &exact))
	{
	  int subvar_flags = flags;
	  if (!exact || size != maxsize)
	    subvar_flags &= ~opf_kill_def;
	  add_stmt_operand (&sv->var, s_ann, subvar_flags);
	}
    }
}
/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
   clobbered variables in the function.  */

static void
add_call_clobber_ops (tree stmt, tree callee)
{
  unsigned u;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  bitmap not_read_b, not_written_b;

  /* Functions that are not const, pure or never return may clobber
     call-clobbered variables.  */
  if (s_ann)
    s_ann->makes_clobbering_call = true;

  /* If we created .GLOBAL_VAR earlier, just use it.  See compute_may_aliases
     for the heuristic used to decide whether to create .GLOBAL_VAR or not.  */
  if (global_var)
    {
      add_stmt_operand (&global_var, s_ann, opf_is_def);
      return;
    }

  /* Get info for local and module level statics.  There is a bit
     set for each static if the call being processed does not read
     or write that variable.  */
  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
  not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;

  /* Add a V_MAY_DEF operand for every call clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
    {
      tree var = referenced_var_lookup (u);
      unsigned int escape_mask = var_ann (var)->escape_mask;
      tree real_var = var;
      bool not_read;
      bool not_written;

      /* Not read and not written are computed on regular vars, not
	 subvars, so look at the parent var if this is an SFT.  */
      if (TREE_CODE (var) == STRUCT_FIELD_TAG)
	real_var = SFT_PARENT_VAR (var);

      not_read = not_read_b ? bitmap_bit_p (not_read_b,
					    DECL_UID (real_var)) : false;
      not_written = not_written_b ? bitmap_bit_p (not_written_b,
						  DECL_UID (real_var)) : false;
      gcc_assert (!unmodifiable_var_p (var));

      clobber_stats.clobbered_vars++;

      /* See if this variable is really clobbered by this function.  */

      /* Trivial case: Things escaping only to pure/const are not
	 clobbered by non-pure-const, and only read by pure/const.  */
      if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
	{
	  tree call = get_call_expr_in (stmt);
	  if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
	    {
	      add_stmt_operand (&var, s_ann, opf_none);
	      clobber_stats.unescapable_clobbers_avoided++;
	      continue;
	    }
	  else
	    {
	      clobber_stats.unescapable_clobbers_avoided++;
	      continue;
	    }
	}

      if (not_written)
	{
	  clobber_stats.static_write_clobbers_avoided++;
	  if (!not_read)
	    add_stmt_operand (&var, s_ann, opf_none);
	  else
	    clobber_stats.static_read_clobbers_avoided++;
	}
      else
	add_virtual_operand (var, s_ann, opf_is_def, NULL, 0, -1, true);
    }
}
/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
   function.  */

static void
add_call_read_ops (tree stmt, tree callee)
{
  unsigned u;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  bitmap not_read_b;

  /* If the function is not pure, it may reference memory.  Add
     a VUSE for .GLOBAL_VAR if it has been created.  See add_referenced_var
     for the heuristic used to decide whether to create .GLOBAL_VAR.  */
  if (global_var)
    {
      add_stmt_operand (&global_var, s_ann, opf_none);
      return;
    }

  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;

  /* Add a VUSE for each call-clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
    {
      tree var = referenced_var (u);
      tree real_var = var;
      bool not_read;

      clobber_stats.readonly_clobbers++;

      /* Not read and not written are computed on regular vars, not
	 subvars, so look at the parent var if this is an SFT.  */
      if (TREE_CODE (var) == STRUCT_FIELD_TAG)
	real_var = SFT_PARENT_VAR (var);

      not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
			    : false;

      if (not_read)
	{
	  clobber_stats.static_readonly_clobbers_avoided++;
	  continue;
	}

      add_stmt_operand (&var, s_ann, opf_none | opf_non_specific);
    }
}
/* A subroutine of get_expr_operands to handle CALL_EXPR.  */

static void
get_call_expr_operands (tree stmt, tree expr)
{
  tree op;
  int call_flags = call_expr_flags (expr);

  /* If aliases have been computed already, add V_MAY_DEF or VUSE
     operands for all the symbols that have been found to be
     call-clobbered.

     Note that if aliases have not been computed, the global effects
     of calls will not be included in the SSA web.  This is fine
     because no optimizer should run before aliases have been
     computed.  By not bothering with virtual operands for CALL_EXPRs
     we avoid adding superfluous virtual operands, which can be a
     significant compile time sink (See PR 15855).  */
  if (aliases_computed_p
      && !bitmap_empty_p (call_clobbered_vars)
      && !(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
	 A 'noreturn' function might, but since we don't return anyway
	 there is no point in recording that.  */
      if (TREE_SIDE_EFFECTS (expr)
	  && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
	add_call_clobber_ops (stmt, get_callee_fndecl (expr));
      else if (!(call_flags & ECF_CONST))
	add_call_read_ops (stmt, get_callee_fndecl (expr));
    }

  /* Find uses in the called function.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);

  for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
    get_expr_operands (stmt, &TREE_VALUE (op), opf_none);

  get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
}
/* Scan operands in the ASM_EXPR stmt referred to in INFO.  */

static void
get_asm_expr_operands (tree stmt)
{
  stmt_ann_t s_ann = stmt_ann (stmt);
  int noutputs = list_length (ASM_OUTPUTS (stmt));
  const char **oconstraints
    = (const char **) alloca ((noutputs) * sizeof (const char *));
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  for (i = 0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
    {
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
			       &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t) && s_ann)
	    add_to_addressable_set (t, &s_ann->addresses_taken);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
    }

  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
    {
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t) && s_ann)
	    add_to_addressable_set (t, &s_ann->addresses_taken);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), 0);
    }

  /* Clobber memory for asm ("" : : : "memory");  */
  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
    if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
      {
	unsigned i;
	bitmap_iterator bi;

	/* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
	   decided to group them).  */
	if (global_var)
	  add_stmt_operand (&global_var, s_ann, opf_is_def);
	else
	  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
	    {
	      tree var = referenced_var (i);
	      add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
	    }

	/* Now clobber all addressables.  */
	EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
	  {
	    tree var = referenced_var (i);

	    /* Subvars are explicitly represented in this list, so
	       we don't need the original to be added to the clobber
	       ops, but the original *will* be in this list because
	       we keep the addressability of the original
	       variable up-to-date so we don't screw up the rest of
	       the backend.  */
	    if (var_can_have_subvars (var)
		&& get_subvars_for_var (var) != NULL)
	      continue;

	    add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
	  }

	break;
      }
}
/* Scan operands for the assignment expression EXPR in statement STMT.  */

static void
get_modify_expr_operands (tree stmt, tree expr)
{
  /* First get operands from the RHS.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);

  /* For the LHS, use a regular definition (OPF_IS_DEF) for GIMPLE
     registers.  If the LHS is a store to memory, we will either need
     a preserving definition (V_MAY_DEF) or a killing definition
     (V_MUST_DEF).

     Preserving definitions are those that modify a part of an
     aggregate object for which no subvars have been computed (or the
     reference does not correspond exactly to one of them).  Stores
     through a pointer are also represented with V_MAY_DEF operators.

     The determination of whether to use a preserving or a killing
     definition is done while scanning the LHS of the assignment.  By
     default, assume that we will emit a V_MUST_DEF.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_is_def|opf_kill_def);
}
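
/* To make the preserving/killing distinction concrete (an illustrative
   sketch, assuming a non-register variable 'a' with no subvars
   computed):

	a = x;		# a_2 = V_MUST_DEF <a_1>	(killing)
	a.f = x;	# a_2 = V_MAY_DEF <a_1>		(preserving)
	*p_1 = x;	V_MAY_DEF of p_1's memory tag	(preserving)

   A partial or indirect store may leave other parts of the object live,
   so only the whole-variable store kills the previous definition.  */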
/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  */

static void
get_expr_operands (tree stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class class;
  tree expr = *expr_p;
  stmt_ann_t s_ann = stmt_ann (stmt);

  if (expr == NULL)
    return;

  code = TREE_CODE (expr);
  class = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the statement takes its
	 address will be of interest to some passes (e.g. alias
	 resolution).  */
      add_to_addressable_set (TREE_OPERAND (expr, 0), &s_ann->addresses_taken);

      /* If the address is invariant, there may be no interesting
	 variable references inside.  */
      if (is_gimple_min_invariant (expr))
	return;

      /* Otherwise, there may be variables referenced inside but there
	 should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find
	 here are ARRAY_REF indices which will always be real operands
	 (GIMPLE does not allow non-registers as array indices).  */
      flags |= opf_no_vops;
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case SSA_NAME:
    case STRUCT_FIELD_TAG:
    case SYMBOL_MEMORY_TAG:
    case NAME_MEMORY_TAG:
      add_stmt_operand (expr_p, s_ann, flags);
      return;

    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      {
	subvar_t svars;

	/* Add the subvars for a variable, if it has subvars, to DEFS
	   or USES.  Otherwise, add the variable itself.  Whether it
	   goes to USES or DEFS depends on the operand flags.  */
	if (var_can_have_subvars (expr)
	    && (svars = get_subvars_for_var (expr)))
	  {
	    subvar_t sv;
	    for (sv = svars; sv; sv = sv->next)
	      add_stmt_operand (&sv->var, s_ann, flags);
	  }
	else
	  add_stmt_operand (expr_p, s_ann, flags);

	return;
      }
    case MISALIGNED_INDIRECT_REF:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
      /* Fall through.  */

    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      get_indirect_ref_operands (stmt, expr, flags, NULL_TREE, 0, -1, true);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (stmt, expr, flags);
      return;
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree ref;
	HOST_WIDE_INT offset, size, maxsize;
	bool none = true;

	/* This component reference becomes an access to all of the
	   subvariables it can touch, if we can determine that, but
	   *NOT* the real one.  If we can't determine which fields we
	   could touch, the recursion will eventually get to a
	   variable and add *all* of its subvars, or whatever is the
	   minimum correct subset.  */
	ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
	if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
	  {
	    subvar_t sv;
	    subvar_t svars = get_subvars_for_var (ref);

	    for (sv = svars; sv; sv = sv->next)
	      {
		bool exact;

		if (overlap_subvar (offset, maxsize, sv->var, &exact))
		  {
		    int subvar_flags = flags;
		    none = false;
		    if (!exact || size != maxsize)
		      subvar_flags &= ~opf_kill_def;
		    add_stmt_operand (&sv->var, s_ann, subvar_flags);
		  }
	      }

	    if (!none)
	      flags |= opf_no_vops;
	  }
	else if (TREE_CODE (ref) == INDIRECT_REF)
	  {
	    get_indirect_ref_operands (stmt, ref, flags, expr, offset,
				       maxsize, false);
	    flags |= opf_no_vops;
	  }

	/* Even if we found subvars above we need to ensure to see
	   immediate uses for d in s.a[d].  In case s.a has a subvar,
	   we would miss it otherwise.  */
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
			   flags & ~opf_kill_def);

	if (code == COMPONENT_REF)
	  {
	    if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
	      s_ann->has_volatile_ops = true;
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
	  }
	else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
	  {
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
	  }

	return;
      }
    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case CALL_EXPR:
      get_call_expr_operands (stmt, expr);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      return;
    case MODIFY_EXPR:
      get_modify_expr_operands (stmt, expr);
      return;

    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
	constructor_elt *ce;
	unsigned HOST_WIDE_INT idx;

	for (idx = 0;
	     VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
	     idx++)
	  get_expr_operands (stmt, &ce->value, opf_none);

	return;
      }

    case BIT_FIELD_REF:
      /* Stores using BIT_FIELD_REF are always preserving definitions.  */
      flags &= ~opf_kill_def;

      /* Fall through.  */
    case TRUTH_NOT_EXPR:
    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }

    case DOT_PROD_EXPR:
    case REALIGN_LOAD_EXPR:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
	return;
      }
    case BLOCK:
    case FUNCTION_DECL:
    case EXC_PTR_EXPR:
    case FILTER_EXPR:
    case LABEL_DECL:
    case CONST_DECL:
    case OMP_RETURN_EXPR:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (class == tcc_unary)
	goto do_unary;
      if (class == tcc_binary || class == tcc_comparison)
	goto do_binary;
      if (class == tcc_constant || class == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
#endif
  gcc_unreachable ();
}
/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (tree stmt)
{
  enum tree_code code;

  code = TREE_CODE (stmt);
  switch (code)
    {
    case MODIFY_EXPR:
      get_modify_expr_operands (stmt, stmt);
      break;

    case COND_EXPR:
      get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
      break;

    case SWITCH_EXPR:
      get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
      break;

    case ASM_EXPR:
      get_asm_expr_operands (stmt);
      break;

    case RETURN_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
      break;

    case GOTO_EXPR:
      get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
      break;

    case LABEL_EXPR:
      get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
      break;

    case BIND_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
      /* These nodes contain no variable references.  */
      break;

    default:
      /* Notice that if get_expr_operands tries to use &STMT as the
	 operand pointer (which may only happen for USE operands), we
	 will fail in add_stmt_operand.  This default will handle
	 statements like empty statements, or CALL_EXPRs that may
	 appear on the RHS of a statement or as statements themselves.  */
      get_expr_operands (stmt, &stmt, opf_none);
      break;
    }
}
/* Create an operands cache for STMT.  */

static void
build_ssa_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);

  /* Initially assume that the statement has no volatile operands.  */
  if (ann)
    ann->has_volatile_ops = false;

  start_ssa_stmt_operands ();

  parse_ssa_operands (stmt);
  operand_build_sort_virtual (build_vuses);
  operand_build_sort_virtual (build_v_may_defs);
  operand_build_sort_virtual (build_v_must_defs);

  finalize_ssa_stmt_operands (stmt);
}
/* Free any operands vectors in OPS.  */

void
free_ssa_operands (stmt_operands_p ops)
{
  ops->def_ops = NULL;
  ops->use_ops = NULL;
  ops->maydef_ops = NULL;
  ops->mustdef_ops = NULL;
  ops->vuse_ops = NULL;
}
/* Get the operands of statement STMT.  */

void
update_stmt_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);

  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active ())
    return;

  /* The optimizers cannot handle statements that are nothing but a
     _DECL.  This indicates a bug in the gimplifier.  */
  gcc_assert (!SSA_VAR_P (stmt));

  gcc_assert (ann->modified);

  timevar_push (TV_TREE_OPS);

  build_ssa_operands (stmt);

  /* Clear the modified bit for STMT.  */
  ann->modified = 0;

  timevar_pop (TV_TREE_OPS);
}
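
/* The expected usage from a pass is through the update_stmt wrapper (a
   sketch; update_stmt lives in tree-flow-inline.h and marks the
   statement modified before re-scanning it when the cache is active):

	TREE_OPERAND (rhs, 1) = new_op;
	update_stmt (stmt);

   after which both the operand cache and the immediate-use lists
   reflect the edited statement.  */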
/* Copies virtual operands from SRC to DST.  */

void
copy_virtual_operands (tree dest, tree src)
{
  tree t;
  ssa_op_iter iter, old_iter;
  use_operand_p use_p, u2;
  def_operand_p def_p, d2;

  build_ssa_operands (dest);

  /* Copy all the virtual fields.  */
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
    append_vuse (t);
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
    append_v_may_def (t);
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
    append_v_must_def (t);

  if (VEC_length (tree, build_vuses) == 0
      && VEC_length (tree, build_v_may_defs) == 0
      && VEC_length (tree, build_v_must_defs) == 0)
    return;

  /* Now commit the virtual operands to this stmt.  */
  finalize_ssa_v_must_defs (dest);
  finalize_ssa_v_may_defs (dest);
  finalize_ssa_vuses (dest);

  /* Finally, set the fields to the same values as the originals.  */
  t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
  FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, t);
      t = op_iter_next_tree (&old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));

  op_iter_init_maydef (&old_iter, src, &u2, &d2);
  FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, USE_FROM_PTR (u2));
      SET_DEF (def_p, DEF_FROM_PTR (d2));
      op_iter_next_maymustdef (&u2, &d2, &old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));

  op_iter_init_mustdef (&old_iter, src, &u2, &d2);
  FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, USE_FROM_PTR (u2));
      SET_DEF (def_p, DEF_FROM_PTR (d2));
      op_iter_next_maymustdef (&u2, &d2, &old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));
}
/* Specifically for use in DOM's expression analysis.  Given a store, we
   create an artificial stmt which looks like a load from the store; this
   can be used to eliminate redundant loads.  OLD_OPS are the operands
   from the store stmt, and NEW_STMT is the new load which represents a
   load of the values stored.  */

void
create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
{
  stmt_ann_t ann;
  tree op;
  ssa_op_iter iter;
  use_operand_p use_p;
  unsigned x;

  ann = get_stmt_ann (new_stmt);

  /* Process the stmt looking for operands.  */
  start_ssa_stmt_operands ();
  parse_ssa_operands (new_stmt);

  for (x = 0; x < VEC_length (tree, build_vuses); x++)
    {
      tree t = VEC_index (tree, build_vuses, x);
      if (TREE_CODE (t) != SSA_NAME)
	{
	  var_ann_t ann = var_ann (t);
	  ann->in_vuse_list = 0;
	}
    }

  for (x = 0; x < VEC_length (tree, build_v_may_defs); x++)
    {
      tree t = VEC_index (tree, build_v_may_defs, x);
      if (TREE_CODE (t) != SSA_NAME)
	{
	  var_ann_t ann = var_ann (t);
	  ann->in_v_may_def_list = 0;
	}
    }

  /* Remove any virtual operands that were found.  */
  VEC_truncate (tree, build_v_may_defs, 0);
  VEC_truncate (tree, build_v_must_defs, 0);
  VEC_truncate (tree, build_vuses, 0);

  /* For each VDEF on the original statement, we want to create a
     VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
     statement.  */
  FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
			     (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
    append_vuse (op);

  /* Now build the operands for this new stmt.  */
  finalize_ssa_stmt_operands (new_stmt);

  /* All uses in this fake stmt must not be in the immediate use lists.  */
  FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
    delink_imm_use (use_p);
}
/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is done
   to test the validity of the swap operation.  */

void
swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  /* If the operand cache is active, attempt to preserve the relative
     positions of these two operands in their respective immediate use
     lists.  */
  if (ssa_operands_active () && op0 != op1)
    {
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp0)
	  {
	    use0 = ptr;
	    break;
	  }

      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp1)
	  {
	    use1 = ptr;
	    break;
	  }

      /* If both uses don't have operand entries, there isn't much we can do
	 at this point.  Presumably we don't need to worry about it.  */
      if (use0 && use1)
	{
	  tree *tmp = USE_OP_PTR (use1)->use;
	  USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
	  USE_OP_PTR (use0)->use = tmp;
	}
    }

  /* Now swap the data.  */
  *exp0 = op1;
  *exp1 = op0;
}
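
/* A typical use (a sketch mirroring how passes canonicalize commutative
   operands): swap the two operands of a commutative RHS while keeping
   the operand cache and immediate-use lists consistent:

	tree rhs = TREE_OPERAND (stmt, 1);

	swap_tree_operands (stmt, &TREE_OPERAND (rhs, 0),
			    &TREE_OPERAND (rhs, 1));

   No operand rescan is needed for the swap itself, since the cached use
   nodes are re-pointed in place.  */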
/* Add the base address of REF to the set *ADDRESSES_TAKEN.  If
   *ADDRESSES_TAKEN is NULL, a new set is created.  REF may be
   a single variable whose address has been taken or any other valid
   GIMPLE memory reference (structure reference, array, etc).  If the
   base address of REF is a decl that has sub-variables, also add all
   of its sub-variables.  */

void
add_to_addressable_set (tree ref, bitmap *addresses_taken)
{
  tree var;
  subvar_t svars;

  gcc_assert (addresses_taken);

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var && SSA_VAR_P (var))
    {
      if (*addresses_taken == NULL)
	*addresses_taken = BITMAP_GGC_ALLOC ();

      if (var_can_have_subvars (var)
	  && (svars = get_subvars_for_var (var)))
	{
	  subvar_t sv;
	  for (sv = svars; sv; sv = sv->next)
	    {
	      bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
	      TREE_ADDRESSABLE (sv->var) = 1;
	    }
	}
      else
	{
	  bitmap_set_bit (*addresses_taken, DECL_UID (var));
	  TREE_ADDRESSABLE (var) = 1;
	}
    }
}
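
/* For instance (illustrative): for a statement taking the address

	p = &s.f;

   where 's' has subvars SFT.1 and SFT.2, calling

	add_to_addressable_set (TREE_OPERAND (addr, 0),
				&s_ann->addresses_taken);

   (ADDR being the ADDR_EXPR node) sets the bits for DECL_UID (SFT.1)
   and DECL_UID (SFT.2) in the statement's addresses_taken bitmap and
   marks both subvars TREE_ADDRESSABLE, not just 's' itself.  */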
/* Scan the immediate_use list for VAR making sure it is linked properly.
   Return TRUE if there is a problem and emit an error message to F.  */

bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
	goto error;

      if (ptr->use == NULL)
	goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
	goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
	 problem.  */
      if (count++ > 50000000)
	goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
	goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
	goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->stmt && stmt_modified_p (ptr->stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
      print_generic_stmt (f, ptr->stmt, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
	   (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}
/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else if (has_single_use (var))
    fprintf (file, " single use.\n");
  else
    fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (!is_gimple_reg (USE_FROM_PTR (use_p)))
	print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
      else
	print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
    }
  fprintf (file, "\n");
}
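
/* A sketch of how a pass can walk the same lists directly, using the
   FOR_EACH_IMM_USE_FAST iterator used above (read-only walk; the
   variable names here are hypothetical):

	imm_use_iterator iter;
	use_operand_p use_p;
	unsigned count = 0;

	FOR_EACH_IMM_USE_FAST (use_p, iter, ssa_name)
	  if (USE_STMT (use_p) != skip_stmt)
	    count++;  */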
/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
	continue;
      dump_immediate_uses_for (file, var);
    }
}


/* Dump def-use edges on stderr.  */

void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}


/* Dump def-use edges for VAR on stderr.  */

void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}

#include "gt-tree-ssa-operands.h"