1 /* SSA operands management for trees.
2 Copyright (C) 2003 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
23 #include "coretypes.h"
28 #include "diagnostic.h"
29 #include "tree-flow.h"
30 #include "tree-inline.h"
31 #include "tree-pass.h"
36 /* This file contains the code required to manage the operands cache of the
37 SSA optimizer. For every stmt, we maintain an operand cache in the stmt
38 annotation. This cache contains operands that will be of interest to
39 optimizers and other passes wishing to manipulate the IL.
41 The operand types are broken up into REAL and VIRTUAL operands. The real
42 operands are represented as pointers into the stmt's operand tree. Thus
43 any manipulation of the real operands will be reflected in the actual tree.
44 Virtual operands are represented solely in the cache, although the base
45 variable for the SSA_NAME may or may not occur in the stmt's tree.
46 Manipulation of the virtual operands will not be reflected in the stmt tree.
48 The routines in this file are concerned with creating this operand cache
49 from a stmt tree.
51 get_stmt_operands() is the primary entry point.
53 The operand tree is then parsed by the various get_* routines which look
54 through the stmt tree for the occurrence of operands which may be of
55 interest, and calls are made to the append_* routines whenever one is
56 found. There are 5 of these routines, each representing one of the
57 5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
58 Virtual Must Defs.
60 The append_* routines check for duplication, and simply keep a list of
61 unique objects for each operand type in the build_* extendable vectors.
63 Once the stmt tree is completely parsed, the finalize_ssa_operands()
64 routine is called, which proceeds to perform the finalization routine
65 on each of the 5 operand vectors which have been built up.
67 If the stmt had a previous operand cache, the finalization routines
68 attempt to match up the new operands with the old ones. If it's a perfect
69 match, the old vector is simply reused. If it isn't a perfect match, then
70 a new vector is created and the new operands are placed there. For
71 virtual operands, if the previous cache had an SSA_NAME version of a
72 variable, and that same variable occurs in the new operand cache, then
73 the new cache vector will also get the same SSA_NAME.
75 i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
76 vector for VUSE, then the new vector will also be modified such that
77 it contains 'a_5' rather than 'a'. */
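/* As an illustrative sketch only (assuming the STMT_USE_OPS, NUM_USES and
   USE_OP accessors declared in tree-flow.h; they are not defined in this
   file), a pass that wants to visit the real USE operands of a statement
   would typically do:

	use_optype uses;
	unsigned i;

	get_stmt_operands (stmt);
	uses = STMT_USE_OPS (stmt);
	for (i = 0; i < NUM_USES (uses); i++)
	  process (USE_OP (uses, i));

   where 'process' stands for whatever the pass does with each operand.  */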
82 /* Flags to describe operand properties in get_stmt_operands and helpers. */
84 /* By default, operands are loaded. */
85 #define opf_none 0
87 /* Operand is the target of an assignment expression or a
88 call-clobbered variable. */
89 #define opf_is_def (1 << 0)
91 /* Operand is the target of an assignment expression. */
92 #define opf_kill_def (1 << 1)
94 /* No virtual operands should be created in the expression. This is used
95 when traversing ADDR_EXPR nodes which have different semantics than
96 other expressions. Inside an ADDR_EXPR node, the only operands that we
97 need to consider are indices into arrays. For instance, &a.b[i] should
98 generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
99 VUSE for 'b'. */
100 #define opf_no_vops (1 << 2)
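/* A small illustration of how these flags are combined (this mirrors the
   calls made by build_ssa_operands and get_expr_operands below, it adds no
   new interface): for a statement 'a = b + c', the LHS is scanned with

	get_expr_operands (stmt, &TREE_OPERAND (stmt, 0),
			   opf_is_def | opf_kill_def);

   while the RHS operands are scanned with opf_none.  For an aliased or
   partial store (e.g. 'a.x = b' or '*p = b'), opf_kill_def is dropped
   because the definition may not kill the whole object.  */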
102 /* Array for building all the def operands. */
103 static GTY (()) varray_type build_defs;
105 /* Array for building all the use operands. */
106 static GTY (()) varray_type build_uses;
108 /* Array for building all the v_may_def operands. */
109 static GTY (()) varray_type build_v_may_defs;
111 /* Array for building all the vuse operands. */
112 static GTY (()) varray_type build_vuses;
114 /* Array for building all the v_must_def operands. */
115 static GTY (()) varray_type build_v_must_defs;
118 #ifdef ENABLE_CHECKING
119 /* Used to make sure operand construction is working on the proper stmt. */
120 tree check_build_stmt;
121 #endif
123 def_operand_p NULL_DEF_OPERAND_P = { NULL };
124 use_operand_p NULL_USE_OPERAND_P = { NULL };
126 static void note_addressable (tree, stmt_ann_t);
127 static void get_expr_operands (tree, tree *, int);
128 static void get_asm_expr_operands (tree);
129 static void get_indirect_ref_operands (tree, tree, int);
130 static void get_call_expr_operands (tree, tree);
131 static inline void append_def (tree *);
132 static inline void append_use (tree *);
133 static void append_v_may_def (tree);
134 static void append_v_must_def (tree);
135 static void add_call_clobber_ops (tree);
136 static void add_call_read_ops (tree);
137 static void add_stmt_operand (tree *, tree, int);
139 /* Return a vector of contiguous memory for NUM def operands. */
141 static inline def_optype
142 allocate_def_optype (unsigned num)
143 {
144 def_optype def_ops;
145 unsigned size;
146 size = sizeof (struct def_optype_d) + sizeof (tree *) * (num - 1);
147 def_ops = ggc_alloc (size);
148 def_ops->num_defs = num;
149 return def_ops;
150 }
153 /* Return a vector of contiguous memory for NUM use operands. */
155 static inline use_optype
156 allocate_use_optype (unsigned num)
157 {
158 use_optype use_ops;
159 unsigned size;
160 size = sizeof (struct use_optype_d) + sizeof (tree *) * (num - 1);
161 use_ops = ggc_alloc (size);
162 use_ops->num_uses = num;
163 return use_ops;
164 }
167 /* Return a vector of contiguous memory for NUM v_may_def operands. */
169 static inline v_may_def_optype
170 allocate_v_may_def_optype (unsigned num)
171 {
172 v_may_def_optype v_may_def_ops;
173 unsigned size;
174 size = sizeof (struct v_may_def_optype_d)
175 + sizeof (v_may_def_operand_type_t) * (num - 1);
176 v_may_def_ops = ggc_alloc (size);
177 v_may_def_ops->num_v_may_defs = num;
178 return v_may_def_ops;
179 }
182 /* Return a vector of contiguous memory for NUM v_use operands. */
184 static inline vuse_optype
185 allocate_vuse_optype (unsigned num)
186 {
187 vuse_optype vuse_ops;
188 unsigned size;
189 size = sizeof (struct vuse_optype_d) + sizeof (tree) * (num - 1);
190 vuse_ops = ggc_alloc (size);
191 vuse_ops->num_vuses = num;
192 return vuse_ops;
193 }
196 /* Return a vector of contiguous memory for NUM v_must_def operands. */
198 static inline v_must_def_optype
199 allocate_v_must_def_optype (unsigned num)
200 {
201 v_must_def_optype v_must_def_ops;
202 unsigned size;
203 size = sizeof (struct v_must_def_optype_d) + sizeof (tree) * (num - 1);
204 v_must_def_ops = ggc_alloc (size);
205 v_must_def_ops->num_v_must_defs = num;
206 return v_must_def_ops;
207 }
210 /* Free memory for USES. */
213 free_uses (use_optype *uses)
223 /* Free memory for DEFS. */
226 free_defs (def_optype *defs)
236 /* Free memory for VUSES. */
239 free_vuses (vuse_optype *vuses)
249 /* Free memory for V_MAY_DEFS. */
252 free_v_may_defs (v_may_def_optype *v_may_defs)
256 ggc_free (*v_may_defs);
262 /* Free memory for V_MUST_DEFS. */
265 free_v_must_defs (v_must_def_optype *v_must_defs)
269 ggc_free (*v_must_defs);
275 /* Initialize the operand cache routines. */
278 init_ssa_operands (void)
280 VARRAY_TREE_PTR_INIT (build_defs, 5, "build defs");
281 VARRAY_TREE_PTR_INIT (build_uses, 10, "build uses");
282 VARRAY_TREE_INIT (build_v_may_defs, 10, "build v_may_defs");
283 VARRAY_TREE_INIT (build_vuses, 10, "build vuses");
284 VARRAY_TREE_INIT (build_v_must_defs, 10, "build v_must_defs");
288 /* Dispose of anything required by the operand routines. */
291 fini_ssa_operands (void)
296 /* All the finalize_ssa_* routines do the work required to turn the build_
297 VARRAY into an operand_vector of the appropriate type. The original vector,
298 if any, is passed in for comparison and virtual SSA_NAME reuse. If the
299 old vector is reused, the pointer passed in is set to NULL so that
300 the memory is not freed when the old operands are freed. */
302 /* Return a new def operand vector for STMT, comparing to OLD_OPS_P. */
305 finalize_ssa_defs (def_optype *old_ops_p, tree stmt ATTRIBUTE_UNUSED)
308 def_optype def_ops, old_ops;
311 num = VARRAY_ACTIVE_SIZE (build_defs);
315 #ifdef ENABLE_CHECKING
316 /* There should only be a single real definition per assignment. */
317 if (TREE_CODE (stmt) == MODIFY_EXPR && num > 1)
321 old_ops = *old_ops_p;
323 /* Compare old vector and new array. */
325 if (old_ops && old_ops->num_defs == num)
328 for (x = 0; x < num; x++)
329 if (old_ops->defs[x].def != VARRAY_TREE_PTR (build_defs, x))
343 def_ops = allocate_def_optype (num);
344 for (x = 0; x < num ; x++)
345 def_ops->defs[x].def = VARRAY_TREE_PTR (build_defs, x);
348 VARRAY_POP_ALL (build_defs);
354 /* Return a new use operand vector for STMT, comparing to OLD_OPS_P. */
357 finalize_ssa_uses (use_optype *old_ops_p, tree stmt ATTRIBUTE_UNUSED)
360 use_optype use_ops, old_ops;
363 num = VARRAY_ACTIVE_SIZE (build_uses);
367 #ifdef ENABLE_CHECKING
370 /* If the pointer to the operand is the statement itself, something is
371 wrong. It means that we are pointing to a local variable (the
372 initial call to get_stmt_operands does not pass a pointer to a
373 local variable). */
374 for (x = 0; x < num; x++)
375 if (*(VARRAY_TREE_PTR (build_uses, x)) == stmt)
379 old_ops = *old_ops_p;
381 /* Check if the old vector and the new array are the same. */
383 if (old_ops && old_ops->num_uses == num)
386 for (x = 0; x < num; x++)
387 if (old_ops->uses[x].use != VARRAY_TREE_PTR (build_uses, x))
401 use_ops = allocate_use_optype (num);
402 for (x = 0; x < num ; x++)
403 use_ops->uses[x].use = VARRAY_TREE_PTR (build_uses, x);
405 VARRAY_POP_ALL (build_uses);
411 /* Return a new v_may_def operand vector for STMT, comparing to OLD_OPS_P. */
413 static v_may_def_optype
414 finalize_ssa_v_may_defs (v_may_def_optype *old_ops_p)
416 unsigned num, x, i, old_num;
417 v_may_def_optype v_may_def_ops, old_ops;
421 num = VARRAY_ACTIVE_SIZE (build_v_may_defs);
425 old_ops = *old_ops_p;
427 /* Check if the old vector and the new array are the same. */
429 if (old_ops && old_ops->num_v_may_defs == num)
433 for (x = 0; x < num; x++)
435 var = old_ops->v_may_defs[x].def;
436 if (TREE_CODE (var) == SSA_NAME)
437 var = SSA_NAME_VAR (var);
438 if (var != VARRAY_TREE (build_v_may_defs, x))
446 old_num = (old_ops ? old_ops->num_v_may_defs : 0);
450 v_may_def_ops = old_ops;
455 v_may_def_ops = allocate_v_may_def_optype (num);
456 for (x = 0; x < num; x++)
458 var = VARRAY_TREE (build_v_may_defs, x);
459 /* Look for VAR in the old operands vector. */
460 for (i = 0; i < old_num; i++)
462 result = old_ops->v_may_defs[i].def;
463 if (TREE_CODE (result) == SSA_NAME)
464 result = SSA_NAME_VAR (result);
467 v_may_def_ops->v_may_defs[x] = old_ops->v_may_defs[i];
473 v_may_def_ops->v_may_defs[x].def = var;
474 v_may_def_ops->v_may_defs[x].use = var;
479 /* Empty the V_MAY_DEF build vector after VUSES have been processed. */
481 return v_may_def_ops;
485 /* Return a new vuse operand vector, comparing to OLD_OPS_P. */
488 finalize_ssa_vuses (vuse_optype *old_ops_p)
490 unsigned num, x, i, num_v_may_defs, old_num;
491 vuse_optype vuse_ops, old_ops;
494 num = VARRAY_ACTIVE_SIZE (build_vuses);
497 VARRAY_POP_ALL (build_v_may_defs);
501 /* Remove superfluous VUSE operands. If the statement already has a
502 V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
503 needed because V_MAY_DEFs imply a VUSE of the variable. For instance,
504 suppose that variable 'a' is aliased:
506 # VUSE <a_2>
507 # a_3 = V_MAY_DEF <a_2>
508 a = a + 1;
510 The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
511 operation. */
513 num_v_may_defs = VARRAY_ACTIVE_SIZE (build_v_may_defs);
515 if (num_v_may_defs > 0)
519 for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
521 vuse = VARRAY_TREE (build_vuses, i);
522 for (j = 0; j < num_v_may_defs; j++)
524 if (vuse == VARRAY_TREE (build_v_may_defs, j))
528 /* If we found a useless VUSE operand, remove it from the
529 operand array by replacing it with the last active element
530 in the operand array (unless the useless VUSE was the
531 last operand, in which case we simply remove it). */
532 if (j != num_v_may_defs)
534 if (i != VARRAY_ACTIVE_SIZE (build_vuses) - 1)
536 VARRAY_TREE (build_vuses, i)
537 = VARRAY_TREE (build_vuses,
538 VARRAY_ACTIVE_SIZE (build_vuses) - 1);
540 VARRAY_POP (build_vuses);
542 /* We want to rescan the element at this index, unless
543 this was the last element, in which case the loop
544 terminates. */
550 num = VARRAY_ACTIVE_SIZE (build_vuses);
551 /* We could have reduced the size to zero now, however. */
554 VARRAY_POP_ALL (build_v_may_defs);
558 old_ops = *old_ops_p;
560 /* Determine whether vuses is the same as the old vector. */
562 if (old_ops && old_ops->num_vuses == num)
566 for (x = 0; x < num ; x++)
569 v = old_ops->vuses[x];
570 if (TREE_CODE (v) == SSA_NAME)
571 v = SSA_NAME_VAR (v);
572 if (v != VARRAY_TREE (build_vuses, x))
580 old_num = (old_ops ? old_ops->num_vuses : 0);
589 vuse_ops = allocate_vuse_optype (num);
590 for (x = 0; x < num; x++)
592 tree result, var = VARRAY_TREE (build_vuses, x);
593 /* Look for VAR in the old vector, and use that SSA_NAME. */
594 for (i = 0; i < old_num; i++)
596 result = old_ops->vuses[i];
597 if (TREE_CODE (result) == SSA_NAME)
598 result = SSA_NAME_VAR (result);
601 vuse_ops->vuses[x] = old_ops->vuses[i];
606 vuse_ops->vuses[x] = var;
610 /* The v_may_def build vector wasn't freed because we needed it here.
611 Free it now with the vuses build vector. */
612 VARRAY_POP_ALL (build_vuses);
613 VARRAY_POP_ALL (build_v_may_defs);
619 /* Return a new v_must_def operand vector for STMT, comparing to OLD_OPS_P. */
621 static v_must_def_optype
622 finalize_ssa_v_must_defs (v_must_def_optype *old_ops_p,
623 tree stmt ATTRIBUTE_UNUSED)
625 unsigned num, x, i, old_num = 0;
626 v_must_def_optype v_must_def_ops, old_ops;
629 num = VARRAY_ACTIVE_SIZE (build_v_must_defs);
633 #ifdef ENABLE_CHECKING
634 /* There should only be a single V_MUST_DEF per assignment. */
635 if (TREE_CODE (stmt) == MODIFY_EXPR && num > 1)
639 old_ops = *old_ops_p;
641 /* Check if the old vector and the new array are the same. */
643 if (old_ops && old_ops->num_v_must_defs == num)
647 for (x = 0; x < num; x++)
649 tree var = old_ops->v_must_defs[x];
650 if (TREE_CODE (var) == SSA_NAME)
651 var = SSA_NAME_VAR (var);
652 if (var != VARRAY_TREE (build_v_must_defs, x))
660 old_num = (old_ops ? old_ops->num_v_must_defs : 0);
664 v_must_def_ops = old_ops;
669 v_must_def_ops = allocate_v_must_def_optype (num);
670 for (x = 0; x < num ; x++)
672 tree result, var = VARRAY_TREE (build_v_must_defs, x);
673 /* Look for VAR in the original vector. */
674 for (i = 0; i < old_num; i++)
676 result = old_ops->v_must_defs[i];
677 if (TREE_CODE (result) == SSA_NAME)
678 result = SSA_NAME_VAR (result);
681 v_must_def_ops->v_must_defs[x] = old_ops->v_must_defs[i];
686 v_must_def_ops->v_must_defs[x] = var;
689 VARRAY_POP_ALL (build_v_must_defs);
691 return v_must_def_ops;
695 /* Finalize all the build vectors, fill the new ones into INFO. */
698 finalize_ssa_stmt_operands (tree stmt, stmt_operands_p old_ops,
699 stmt_operands_p new_ops)
701 new_ops->def_ops = finalize_ssa_defs (&(old_ops->def_ops), stmt);
702 new_ops->use_ops = finalize_ssa_uses (&(old_ops->use_ops), stmt);
703 new_ops->v_must_def_ops
704 = finalize_ssa_v_must_defs (&(old_ops->v_must_def_ops), stmt);
705 new_ops->v_may_def_ops = finalize_ssa_v_may_defs (&(old_ops->v_may_def_ops));
706 new_ops->vuse_ops = finalize_ssa_vuses (&(old_ops->vuse_ops));
710 /* Start the process of building up operands vectors in INFO. */
713 start_ssa_stmt_operands (void)
715 #ifdef ENABLE_CHECKING
716 if (VARRAY_ACTIVE_SIZE (build_defs) > 0
717 || VARRAY_ACTIVE_SIZE (build_uses) > 0
718 || VARRAY_ACTIVE_SIZE (build_vuses) > 0
719 || VARRAY_ACTIVE_SIZE (build_v_may_defs) > 0
720 || VARRAY_ACTIVE_SIZE (build_v_must_defs) > 0)
726 /* Add DEF_P to the list of pointers to operands. */
729 append_def (tree *def_p)
731 VARRAY_PUSH_TREE_PTR (build_defs, def_p);
735 /* Add USE_P to the list of pointers to operands. */
738 append_use (tree *use_p)
740 VARRAY_PUSH_TREE_PTR (build_uses, use_p);
744 /* Add a new virtual may def for variable VAR to the build array. */
747 append_v_may_def (tree var)
751 /* Don't allow duplicate entries. */
752 for (i = 0; i < VARRAY_ACTIVE_SIZE (build_v_may_defs); i++)
753 if (var == VARRAY_TREE (build_v_may_defs, i))
756 VARRAY_PUSH_TREE (build_v_may_defs, var);
760 /* Add VAR to the list of virtual uses. */
763 append_vuse (tree var)
767 /* Don't allow duplicate entries. */
768 for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
769 if (var == VARRAY_TREE (build_vuses, i))
772 VARRAY_PUSH_TREE (build_vuses, var);
776 /* Add VAR to the list of virtual must definitions for INFO. */
779 append_v_must_def (tree var)
783 /* Don't allow duplicate entries. */
784 for (i = 0; i < VARRAY_ACTIVE_SIZE (build_v_must_defs); i++)
785 if (var == VARRAY_TREE (build_v_must_defs, i))
788 VARRAY_PUSH_TREE (build_v_must_defs, var);
791 /* Create an operands cache for STMT, returning it in NEW_OPS. OLD_OPS are the
792 original operands, and if ANN is non-null, appropriate stmt flags are set
793 in the stmt's annotation. Note that some fields in old_ops may
794 change to NULL, although none of the memory they originally pointed to
795 will be destroyed. It is appropriate to call free_ssa_operands() on
796 the value returned in old_ops.
798 The rationale for this: Certain optimizations wish to examine the difference
799 between new_ops and old_ops after processing. If a set of operands doesn't
800 change, new_ops will simply assume the pointer in old_ops, and the old_ops
801 pointer will be set to NULL, indicating no memory needs to be cleared.
802 Usage might appear something like:
804 old_ops_copy = old_ops = stmt_ann(stmt)->operands;
805 build_ssa_operands (stmt, NULL, &old_ops, &new_ops);
806 <* compare old_ops_copy and new_ops *>
807 free_ssa_operands (old_ops); */
810 build_ssa_operands (tree stmt, stmt_ann_t ann, stmt_operands_p old_ops,
811 stmt_operands_p new_ops)
814 tree_ann_t saved_ann = stmt->common.ann;
816 /* Replace stmt's annotation with the one passed in for the duration
817 of the operand building process. This allows "fake" stmts to be built
818 and not be included in other data structures which can be built here. */
819 stmt->common.ann = (tree_ann_t) ann;
821 /* Initially assume that the statement has no volatile operands, nor
822 makes aliased loads or stores. */
825 ann->has_volatile_ops = false;
826 ann->makes_aliased_stores = false;
827 ann->makes_aliased_loads = false;
830 start_ssa_stmt_operands ();
832 code = TREE_CODE (stmt);
836 get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);
837 if (TREE_CODE (TREE_OPERAND (stmt, 0)) == ARRAY_REF
838 || TREE_CODE (TREE_OPERAND (stmt, 0)) == ARRAY_RANGE_REF
839 || TREE_CODE (TREE_OPERAND (stmt, 0)) == COMPONENT_REF
840 || TREE_CODE (TREE_OPERAND (stmt, 0)) == REALPART_EXPR
841 || TREE_CODE (TREE_OPERAND (stmt, 0)) == IMAGPART_EXPR
842 /* Use a V_MAY_DEF if the RHS might throw, as the LHS won't be
843 modified in that case. FIXME we should represent somehow
844 that it is killed on the fallthrough path. */
845 || tree_could_throw_p (TREE_OPERAND (stmt, 1)))
846 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_is_def);
848 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0),
849 opf_is_def | opf_kill_def);
853 get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
857 get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
861 get_asm_expr_operands (stmt);
865 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
869 get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
873 get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
876 /* These nodes contain no variable references. */
878 case CASE_LABEL_EXPR:
880 case TRY_FINALLY_EXPR:
887 /* Notice that if get_expr_operands tries to use &STMT as the operand
888 pointer (which may only happen for USE operands), we will abort in
889 append_use. This default will handle statements like empty
890 statements, or CALL_EXPRs that may appear on the RHS of a statement
891 or as statements themselves. */
892 get_expr_operands (stmt, &stmt, opf_none);
896 finalize_ssa_stmt_operands (stmt, old_ops, new_ops);
897 stmt->common.ann = saved_ann;
901 /* Free any operands vectors in OPS. */
904 free_ssa_operands (stmt_operands_p ops)
907 free_defs (&(ops->def_ops));
909 free_uses (&(ops->use_ops));
911 free_vuses (&(ops->vuse_ops));
912 if (ops->v_may_def_ops)
913 free_v_may_defs (&(ops->v_may_def_ops));
914 if (ops->v_must_def_ops)
915 free_v_must_defs (&(ops->v_must_def_ops));
919 /* Get the operands of statement STMT. Note that repeated calls to
920 get_stmt_operands for the same statement will do nothing until the
921 statement is marked modified by a call to modify_stmt(). */
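/* An illustrative sketch of the intended usage pattern (modify_stmt() comes
   from the tree-flow headers, and 'new_rhs' here is just a placeholder): a
   pass that rewrites part of a statement in place must mark it modified
   before asking for fresh operands:

	TREE_OPERAND (stmt, 1) = new_rhs;
	modify_stmt (stmt);
	get_stmt_operands (stmt);

   Without the call to modify_stmt(), the cached operands from the previous
   call are returned unchanged.  */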
924 get_stmt_operands (tree stmt)
927 stmt_operands_t old_operands;
929 #if defined ENABLE_CHECKING
930 /* The optimizers cannot handle statements that are nothing but a
931 _DECL. This indicates a bug in the gimplifier. */
932 if (SSA_VAR_P (stmt))
936 /* Ignore error statements. */
937 if (TREE_CODE (stmt) == ERROR_MARK)
940 ann = get_stmt_ann (stmt);
942 /* If the statement has not been modified, the operands are still valid. */
946 timevar_push (TV_TREE_OPS);
948 old_operands = ann->operands;
949 memset (&(ann->operands), 0, sizeof (stmt_operands_t));
951 build_ssa_operands (stmt, ann, &old_operands, &(ann->operands));
952 free_ssa_operands (&old_operands);
954 /* Clear the modified bit for STMT. Subsequent calls to
955 get_stmt_operands for this statement will do nothing until the
956 statement is marked modified by a call to modify_stmt(). */
959 timevar_pop (TV_TREE_OPS);
963 /* Recursively scan the expression pointed to by EXPR_P in the statement referred to
964 by INFO. FLAGS is one of the OPF_* constants modifying how to interpret the
965 operands found. */
968 get_expr_operands (tree stmt, tree *expr_p, int flags)
974 if (expr == NULL || expr == error_mark_node)
977 code = TREE_CODE (expr);
978 class = TREE_CODE_CLASS (code);
983 /* We could have the address of a component, array member,
984 etc which has interesting variable references. */
985 /* Taking the address of a variable does not represent a
986 reference to it, but the fact that the stmt takes its address will be
987 of interest to some passes (e.g. alias resolution). */
988 add_stmt_operand (expr_p, stmt, 0);
990 /* If the address is invariant, there may be no interesting variable
991 references inside. */
992 if (is_gimple_min_invariant (expr))
995 /* There should be no VUSEs created, since the referenced objects are
996 not really accessed. The only operands that we should find here
997 are ARRAY_REF indices which will always be real operands (GIMPLE
998 does not allow non-registers as array indices). */
999 flags |= opf_no_vops;
1001 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1008 /* If we found a variable, add it to DEFS or USES depending
1009 on the operand flags. */
1010 add_stmt_operand (expr_p, stmt, flags);
1014 get_indirect_ref_operands (stmt, expr, flags);
1018 case ARRAY_RANGE_REF:
1019 /* Treat array references as references to the virtual variable
1020 representing the array. The virtual variable for an ARRAY_REF
1021 is the VAR_DECL for the array. */
1023 /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
1024 according to the value of IS_DEF. Recurse if the LHS of the
1025 ARRAY_REF node is not a regular variable. */
1026 if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
1027 add_stmt_operand (expr_p, stmt, flags);
1029 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1031 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1032 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1033 get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
1039 /* Similarly to arrays, references to compound variables (complex
1040 types and structures/unions) are globbed.
1042 FIXME: This means that
1044 a.x = 6;
1045 a.y = 7;
1046 foo (a.x, a.y);
1048 will not be constant propagated because the two partial
1049 definitions to 'a' will kill each other. Note that SRA may be
1050 able to fix this problem if 'a' can be scalarized. */
1052 /* If the LHS of the compound reference is not a regular variable,
1053 recurse to keep looking for more operands in the subexpression. */
1054 if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
1055 add_stmt_operand (expr_p, stmt, flags);
1057 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1059 if (code == COMPONENT_REF)
1060 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1063 case WITH_SIZE_EXPR:
1064 /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
1065 and an rvalue reference to its second argument. */
1066 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1067 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1071 get_call_expr_operands (stmt, expr);
1079 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1081 op = TREE_OPERAND (expr, 0);
1082 if (TREE_CODE (op) == WITH_SIZE_EXPR)
1083 op = TREE_OPERAND (op, 0);
1084 if (TREE_CODE (op) == ARRAY_REF
1085 || TREE_CODE (op) == ARRAY_RANGE_REF
1086 || TREE_CODE (op) == COMPONENT_REF
1087 || TREE_CODE (op) == REALPART_EXPR
1088 || TREE_CODE (op) == IMAGPART_EXPR)
1089 subflags = opf_is_def;
1091 subflags = opf_is_def | opf_kill_def;
1093 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags);
1099 /* General aggregate CONSTRUCTORs have been decomposed, but they
1100 are still in use as the COMPLEX_EXPR equivalent for vectors. */
1103 for (t = TREE_OPERAND (expr, 0); t ; t = TREE_CHAIN (t))
1104 get_expr_operands (stmt, &TREE_VALUE (t), opf_none);
1109 case TRUTH_NOT_EXPR:
1111 case VIEW_CONVERT_EXPR:
1113 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1116 case TRUTH_AND_EXPR:
1118 case TRUTH_XOR_EXPR:
1123 tree op0 = TREE_OPERAND (expr, 0);
1124 tree op1 = TREE_OPERAND (expr, 1);
1126 /* If it would be profitable to swap the operands, then do so to
1127 canonicalize the statement, enabling better optimization.
1129 By placing canonicalization of such expressions here we
1130 transparently keep statements in canonical form, even
1131 when the statement is modified. */
1132 if (tree_swap_operands_p (op0, op1, false))
1134 /* For relationals we need to swap the operands
1135 and change the code. */
1141 TREE_SET_CODE (expr, swap_tree_comparison (code));
1142 TREE_OPERAND (expr, 0) = op1;
1143 TREE_OPERAND (expr, 1) = op0;
1146 /* For a commutative operator we can just swap the operands. */
1147 else if (commutative_tree_code (code))
1149 TREE_OPERAND (expr, 0) = op1;
1150 TREE_OPERAND (expr, 1) = op0;
1154 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1155 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1164 /* Expressions that make no memory references. */
1170 if (class == '2' || class == '<')
1172 if (class == 'c' || class == 't')
1176 /* If we get here, something has gone wrong. */
1177 fprintf (stderr, "unhandled expression in get_expr_operands():\n");
1179 fputs ("\n", stderr);
1184 /* Scan operands in the ASM_EXPR stmt referred to in INFO. */
1187 get_asm_expr_operands (tree stmt)
1189 stmt_ann_t s_ann = stmt_ann (stmt);
1190 int noutputs = list_length (ASM_OUTPUTS (stmt));
1191 const char **oconstraints
1192 = (const char **) alloca ((noutputs) * sizeof (const char *));
1195 const char *constraint;
1196 bool allows_mem, allows_reg, is_inout;
1198 for (i = 0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
1200 oconstraints[i] = constraint
1201 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1202 parse_output_constraint (&constraint, i, 0, 0,
1203 &allows_mem, &allows_reg, &is_inout);
1205 #if defined ENABLE_CHECKING
1206 /* This should have been split in gimplify_asm_expr. */
1207 if (allows_reg && is_inout)
1211 /* Memory operands are addressable. Note that STMT needs the
1212 address of this operand. */
1213 if (!allows_reg && allows_mem)
1215 tree t = get_base_address (TREE_VALUE (link));
1216 if (t && DECL_P (t))
1217 note_addressable (t, s_ann);
1220 get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
1223 for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
1226 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1227 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1228 oconstraints, &allows_mem, &allows_reg);
1230 /* Memory operands are addressable. Note that STMT needs the
1231 address of this operand. */
1232 if (!allows_reg && allows_mem)
1234 tree t = get_base_address (TREE_VALUE (link));
1235 if (t && DECL_P (t))
1236 note_addressable (t, s_ann);
1239 get_expr_operands (stmt, &TREE_VALUE (link), 0);
1243 /* Clobber memory for asm ("" : : : "memory"); */
1244 for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
1245 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
1249 /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
1250 decided to group them). */
1252 add_stmt_operand (&global_var, stmt, opf_is_def);
1254 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
1256 tree var = referenced_var (i);
1257 add_stmt_operand (&var, stmt, opf_is_def);
1260 /* Now clobber all addressables. */
1261 EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i,
1263 tree var = referenced_var (i);
1264 add_stmt_operand (&var, stmt, opf_is_def);
1271 /* A subroutine of get_expr_operands to handle INDIRECT_REF. */
1274 get_indirect_ref_operands (tree stmt, tree expr, int flags)
1276 tree *pptr = &TREE_OPERAND (expr, 0);
1278 stmt_ann_t ann = stmt_ann (stmt);
1280 /* Stores into INDIRECT_REF operands are never killing definitions. */
1281 flags &= ~opf_kill_def;
1283 if (SSA_VAR_P (ptr))
1285 struct ptr_info_def *pi = NULL;
1287 /* If PTR has flow-sensitive points-to information, use it. */
1288 if (TREE_CODE (ptr) == SSA_NAME
1289 && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
1290 && pi->name_mem_tag)
1292 /* PTR has its own memory tag. Use it. */
1293 add_stmt_operand (&pi->name_mem_tag, stmt, flags);
1297 /* If PTR is not an SSA_NAME or it doesn't have a name
1298 tag, use its type memory tag. */
1301 /* If we are emitting debugging dumps, display a warning if
1302 PTR is an SSA_NAME with no flow-sensitive alias
1303 information. That means that we may need to compute
1304 aliasing again. */
1306 && TREE_CODE (ptr) == SSA_NAME
1310 "NOTE: no flow-sensitive alias info for ");
1311 print_generic_expr (dump_file, ptr, dump_flags);
1312 fprintf (dump_file, " in ");
1313 print_generic_stmt (dump_file, stmt, dump_flags);
1316 if (TREE_CODE (ptr) == SSA_NAME)
1317 ptr = SSA_NAME_VAR (ptr);
1318 ann = var_ann (ptr);
1319 if (ann->type_mem_tag)
1320 add_stmt_operand (&ann->type_mem_tag, stmt, flags);
1324 /* If a constant is used as a pointer, we can't generate a real
1325 operand for it but we mark the statement volatile to prevent
1326 optimizations from messing things up. */
1327 else if (TREE_CODE (ptr) == INTEGER_CST)
1330 ann->has_volatile_ops = true;
1334 /* Everything else *should* have been folded elsewhere, but users
1335 are smarter than we in finding ways to write invalid code. We
1336 cannot just abort here. If we were absolutely certain that we
1337 do handle all valid cases, then we could just do nothing here.
1338 That seems optimistic, so attempt to do something logical... */
1339 else if ((TREE_CODE (ptr) == PLUS_EXPR || TREE_CODE (ptr) == MINUS_EXPR)
1340 && TREE_CODE (TREE_OPERAND (ptr, 0)) == ADDR_EXPR
1341 && TREE_CODE (TREE_OPERAND (ptr, 1)) == INTEGER_CST)
1343 /* Make sure we know the object is addressable. */
1344 pptr = &TREE_OPERAND (ptr, 0);
1345 add_stmt_operand (pptr, stmt, 0);
1347 /* Mark the object itself with a VUSE. */
1348 pptr = &TREE_OPERAND (*pptr, 0);
1349 get_expr_operands (stmt, pptr, flags);
1353 /* Ok, this isn't even is_gimple_min_invariant. Something's broken. */
1357 /* Add a USE operand for the base pointer. */
1358 get_expr_operands (stmt, pptr, opf_none);
1361 /* A subroutine of get_expr_operands to handle CALL_EXPR. */
1364 get_call_expr_operands (tree stmt, tree expr)
1367 int call_flags = call_expr_flags (expr);
1369 /* Find uses in the called function. */
1370 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1372 for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
1373 get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
1375 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1377 if (bitmap_first_set_bit (call_clobbered_vars) >= 0)
1379 /* A 'pure' or a 'const' function never call-clobbers anything.
1380 A 'noreturn' function might, but since we don't return anyway
1381 there is no point in recording that. */
1382 if (TREE_SIDE_EFFECTS (expr)
1383 && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
1384 add_call_clobber_ops (stmt);
1385 else if (!(call_flags & ECF_CONST))
1386 add_call_read_ops (stmt);
1391 /* Add *VAR_P to the appropriate operand array for INFO. FLAGS is as in
1392 get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
1393 the statement's real operands; otherwise it is added to virtual
1394 operands. */
1397 add_stmt_operand (tree *var_p, tree stmt, int flags)
1401 stmt_ann_t s_ann = stmt_ann (stmt);
1407 /* If the operand is an ADDR_EXPR, add its operand to the list of
1408 variables that have had their address taken in this statement. */
1409 if (TREE_CODE (var) == ADDR_EXPR)
1411 note_addressable (TREE_OPERAND (var, 0), s_ann);
1415 /* If the original variable is not a scalar, it will be added to the list
1416 of virtual operands. In that case, use its base symbol as the virtual
1417 variable representing it. */
1418 is_real_op = is_gimple_reg (var);
1419 if (!is_real_op && !DECL_P (var))
1420 var = get_virtual_var (var);
1422 /* If VAR is not a variable that we care to optimize, do nothing. */
1423 if (var == NULL_TREE || !SSA_VAR_P (var))
1426 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1427 v_ann = var_ann (sym);
1429 /* Don't expose volatile variables to the optimizers. */
1430 if (TREE_THIS_VOLATILE (sym))
1433 s_ann->has_volatile_ops = true;
1439 /* The variable is a GIMPLE register. Add it to real operands. */
1440 if (flags & opf_is_def)
1447 varray_type aliases;
1449 /* The variable is not a GIMPLE register. Add it (or its aliases) to
1450 virtual operands, unless the caller has specifically requested
1451 not to add virtual operands (used when adding operands inside an
1452 ADDR_EXPR expression). */
1453 if (flags & opf_no_vops)
1456 aliases = v_ann->may_aliases;
1458 if (aliases == NULL)
1460 /* The variable is not aliased or it is an alias tag. */
1461 if (flags & opf_is_def)
1463 if (v_ann->is_alias_tag)
1465 /* Alias tagged vars get V_MAY_DEF to avoid breaking
1466 def-def chains with the other variables in their
1467 alias sets. */
1469 s_ann->makes_aliased_stores = 1;
1470 append_v_may_def (var);
1472 else if (flags & opf_kill_def)
1474 #if defined ENABLE_CHECKING
1475 /* Only regular variables may get a V_MUST_DEF
1477 if (v_ann->mem_tag_kind != NOT_A_TAG)
1480 /* V_MUST_DEF for non-aliased, non-GIMPLE register
1481 variable definitions. */
1482 append_v_must_def (var);
1486 /* Add a V_MAY_DEF for call-clobbered variables and
1487 memory tags. */
1488 append_v_may_def (var);
1494 if (s_ann && v_ann->is_alias_tag)
1495 s_ann->makes_aliased_loads = 1;
1502 /* The variable is aliased. Add its aliases to the virtual
1503 operands. */
1504 #if defined ENABLE_CHECKING
1505 if (VARRAY_ACTIVE_SIZE (aliases) == 0)
1509 if (flags & opf_is_def)
1511 /* If the variable is also an alias tag, add a virtual
1512 operand for it, otherwise we will miss representing
1513 references to the members of the variable's alias set.
1514 This fixes the bug in gcc.c-torture/execute/20020503-1.c. */
1515 if (v_ann->is_alias_tag)
1516 append_v_may_def (var);
1518 for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
1519 append_v_may_def (VARRAY_TREE (aliases, i));
1522 s_ann->makes_aliased_stores = 1;
1526 /* Similarly, append a virtual use for VAR itself, when
1527 it is an alias tag. */
1528 if (v_ann->is_alias_tag)
1531 for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
1532 append_vuse (VARRAY_TREE (aliases, i));
1535 s_ann->makes_aliased_loads = 1;
1542 /* Record that VAR had its address taken in the statement with annotations
1543 S_ANN. */
1546 note_addressable (tree var, stmt_ann_t s_ann)
1551 var = get_base_address (var);
1552 if (var && SSA_VAR_P (var))
1554 if (s_ann->addresses_taken == NULL)
1555 s_ann->addresses_taken = BITMAP_GGC_ALLOC ();
1556 bitmap_set_bit (s_ann->addresses_taken, var_ann (var)->uid);
1561 /* Add clobbering definitions for .GLOBAL_VAR or for each of the call
1562 clobbered variables in the function. */
1565 add_call_clobber_ops (tree stmt)
1567 /* Functions that are not const, pure, or noreturn may clobber
1568 call-clobbered variables. */
1569 if (stmt_ann (stmt))
1570 stmt_ann (stmt)->makes_clobbering_call = true;
1572 /* If we had created .GLOBAL_VAR earlier, use it. Otherwise, add
1573 a V_MAY_DEF operand for every call clobbered variable. See
1574 compute_may_aliases for the heuristic used to decide whether
1575 to create .GLOBAL_VAR or not. */
1577 add_stmt_operand (&global_var, stmt, opf_is_def);
1582 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
1584 tree var = referenced_var (i);
1586 /* If VAR is read-only, don't add a V_MAY_DEF, just a
1587 VUSE. */
1588 if (!TREE_READONLY (var))
1589 add_stmt_operand (&var, stmt, opf_is_def);
1591 add_stmt_operand (&var, stmt, opf_none);
1597 /* Add VUSE operands for .GLOBAL_VAR or all call-clobbered variables in the
1598 function. */
1601 add_call_read_ops (tree stmt)
1603 /* Otherwise, if the function is not pure, it may reference memory. Add
1604 a VUSE for .GLOBAL_VAR if it has been created. Otherwise, add a VUSE
1605 for each call-clobbered variable. See add_referenced_var for the
1606 heuristic used to decide whether to create .GLOBAL_VAR. */
1608 add_stmt_operand (&global_var, stmt, opf_none);
1613 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
1615 tree var = referenced_var (i);
1616 add_stmt_operand (&var, stmt, opf_none);
1621 /* Copies virtual operands from SRC to DST. */
1624 copy_virtual_operands (tree dst, tree src)
1627 vuse_optype vuses = STMT_VUSE_OPS (src);
1628 v_may_def_optype v_may_defs = STMT_V_MAY_DEF_OPS (src);
1629 v_must_def_optype v_must_defs = STMT_V_MUST_DEF_OPS (src);
1630 vuse_optype *vuses_new = &stmt_ann (dst)->operands.vuse_ops;
1631 v_may_def_optype *v_may_defs_new = &stmt_ann (dst)->operands.v_may_def_ops;
1632 v_must_def_optype *v_must_defs_new = &stmt_ann (dst)->operands.v_must_def_ops;
1636 *vuses_new = allocate_vuse_optype (NUM_VUSES (vuses));
1637 for (i = 0; i < NUM_VUSES (vuses); i++)
1638 SET_VUSE_OP (*vuses_new, i, VUSE_OP (vuses, i));
1643 *v_may_defs_new = allocate_v_may_def_optype (NUM_V_MAY_DEFS (v_may_defs));
1644 for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
1646 SET_V_MAY_DEF_OP (*v_may_defs_new, i, V_MAY_DEF_OP (v_may_defs, i));
1647 SET_V_MAY_DEF_RESULT (*v_may_defs_new, i,
1648 V_MAY_DEF_RESULT (v_may_defs, i));
1654 *v_must_defs_new = allocate_v_must_def_optype (NUM_V_MUST_DEFS (v_must_defs));
1655 for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
1656 SET_V_MUST_DEF_OP (*v_must_defs_new, i, V_MUST_DEF_OP (v_must_defs, i));
1661 /* Specifically for use in DOM's expression analysis. Given a store, we
1662 create an artificial stmt which looks like a load from the store; this can
1663 be used to eliminate redundant loads. OLD_OPS are the operands from the
1664 store stmt, and NEW_STMT is the new load which represents a load of the
1665 values stored. */
1668 create_ssa_artficial_load_stmt (stmt_operands_p old_ops, tree new_stmt)
1672 stmt_operands_t tmp;
1675 memset (&tmp, 0, sizeof (stmt_operands_t));
1676 ann = get_stmt_ann (new_stmt);
1678 /* Free operands just in case it was an existing stmt. */
1679 free_ssa_operands (&(ann->operands));
1681 build_ssa_operands (new_stmt, NULL, &tmp, &(ann->operands));
1682 free_vuses (&(ann->operands.vuse_ops));
1683 free_v_may_defs (&(ann->operands.v_may_def_ops));
1684 free_v_must_defs (&(ann->operands.v_must_def_ops));
1686 /* For each VDEF on the original statement, we want to create a
1687 VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
1688 statement. */
1689 for (j = 0; j < NUM_V_MAY_DEFS (old_ops->v_may_def_ops); j++)
1691 op = V_MAY_DEF_RESULT (old_ops->v_may_def_ops, j);
1695 for (j = 0; j < NUM_V_MUST_DEFS (old_ops->v_must_def_ops); j++)
1697 op = V_MUST_DEF_OP (old_ops->v_must_def_ops, j);
1701 /* Now set the vuses for this new stmt. */
1702 ann->operands.vuse_ops = finalize_ssa_vuses (&(tmp.vuse_ops));
1705 #include "gt-tree-ssa-operands.h"