/* SSA operands management for trees.
   Copyright (C) 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may, or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt
   tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   get_stmt_operands() is the primary entry point.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 5 of these routines, each representing one of the
   5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
   Virtual Must Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 5 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   I.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
   vector for VUSE, then the new vector will also be modified such that
   it contains 'a_5' rather than 'a'.  */
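
/* Illustrative sketch only (not from the original file): a pass typically
   refreshes the cache and then walks it.  The optype accessors used here
   (STMT_VUSE_OPS, NUM_VUSES, VUSE_OP) are assumed to be the ones declared
   in tree-flow.h:

	vuse_optype vuses;
	size_t i;

	get_stmt_operands (stmt);
	vuses = STMT_VUSE_OPS (stmt);
	for (i = 0; i < NUM_VUSES (vuses); i++)
	  print_generic_expr (stderr, VUSE_OP (vuses, i), 0);
*/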
/* Flags to describe operand properties in get_stmt_operands and helpers.  */

/* By default, operands are loaded.  */
#define opf_none	0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_is_def 	(1 << 0)

/* Operand is the target of an assignment expression.  */
#define opf_kill_def 	(1 << 1)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops 	(1 << 2)
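
/* Sketch of how these flags combine in practice (this mirrors the
   MODIFY_EXPR handling in build_ssa_operands below): for `a = b' the RHS
   is scanned with opf_none and the LHS with opf_is_def | opf_kill_def,
   while a partial store such as `a.x = b' drops opf_kill_def because it
   does not kill all of 'a':

	get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);
	get_expr_operands (stmt, &TREE_OPERAND (stmt, 0),
			   opf_is_def | opf_kill_def);
*/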
/* Array for building all the def operands.  */
static GTY (()) varray_type build_defs;

/* Array for building all the use operands.  */
static GTY (()) varray_type build_uses;

/* Array for building all the v_may_def operands.  */
static GTY (()) varray_type build_v_may_defs;

/* Array for building all the vuse operands.  */
static GTY (()) varray_type build_vuses;

/* Array for building all the v_must_def operands.  */
static GTY (()) varray_type build_v_must_defs;
#ifdef ENABLE_CHECKING
/* Used to make sure operand construction is working on the proper stmt.  */
tree check_build_stmt;
#endif
def_operand_p NULL_DEF_OPERAND_P = { NULL };
use_operand_p NULL_USE_OPERAND_P = { NULL };
static void note_addressable (tree, stmt_ann_t);
static void get_expr_operands (tree, tree *, int);
static void get_asm_expr_operands (tree);
static void get_indirect_ref_operands (tree, tree, int);
static void get_call_expr_operands (tree, tree);
static inline void append_def (tree *);
static inline void append_use (tree *);
static void append_v_may_def (tree);
static void append_v_must_def (tree);
static void add_call_clobber_ops (tree);
static void add_call_read_ops (tree);
static void add_stmt_operand (tree *, tree, int);
/* Return a vector of contiguous memory for NUM def operands.  */

static inline def_optype
allocate_def_optype (unsigned num)
{
  def_optype def_ops;
  unsigned size;

  size = sizeof (struct def_optype_d) + sizeof (tree *) * (num - 1);
  def_ops = ggc_alloc (size);
  def_ops->num_defs = num;
  return def_ops;
}
/* Return a vector of contiguous memory for NUM use operands.  */

static inline use_optype
allocate_use_optype (unsigned num)
{
  use_optype use_ops;
  unsigned size;

  size = sizeof (struct use_optype_d) + sizeof (tree *) * (num - 1);
  use_ops = ggc_alloc (size);
  use_ops->num_uses = num;
  return use_ops;
}
/* Return a vector of contiguous memory for NUM v_may_def operands.  */

static inline v_may_def_optype
allocate_v_may_def_optype (unsigned num)
{
  v_may_def_optype v_may_def_ops;
  unsigned size;

  size = sizeof (struct v_may_def_optype_d)
	 + sizeof (v_may_def_operand_type_t) * (num - 1);
  v_may_def_ops = ggc_alloc (size);
  v_may_def_ops->num_v_may_defs = num;
  return v_may_def_ops;
}
/* Return a vector of contiguous memory for NUM vuse operands.  */

static inline vuse_optype
allocate_vuse_optype (unsigned num)
{
  vuse_optype vuse_ops;
  unsigned size;

  size = sizeof (struct vuse_optype_d) + sizeof (tree) * (num - 1);
  vuse_ops = ggc_alloc (size);
  vuse_ops->num_vuses = num;
  return vuse_ops;
}
/* Return a vector of contiguous memory for NUM v_must_def operands.  */

static inline v_must_def_optype
allocate_v_must_def_optype (unsigned num)
{
  v_must_def_optype v_must_def_ops;
  unsigned size;

  size = sizeof (struct v_must_def_optype_d) + sizeof (tree) * (num - 1);
  v_must_def_ops = ggc_alloc (size);
  v_must_def_ops->num_v_must_defs = num;
  return v_must_def_ops;
}
/* Free memory for USES.  */

static void
free_uses (use_optype *uses)
{
  if (*uses)
    {
      ggc_free (*uses);
      *uses = NULL;
    }
}

/* Free memory for DEFS.  */

static void
free_defs (def_optype *defs)
{
  if (*defs)
    {
      ggc_free (*defs);
      *defs = NULL;
    }
}

/* Free memory for VUSES.  */

static void
free_vuses (vuse_optype *vuses)
{
  if (*vuses)
    {
      ggc_free (*vuses);
      *vuses = NULL;
    }
}

/* Free memory for V_MAY_DEFS.  */

static void
free_v_may_defs (v_may_def_optype *v_may_defs)
{
  if (*v_may_defs)
    {
      ggc_free (*v_may_defs);
      *v_may_defs = NULL;
    }
}

/* Free memory for V_MUST_DEFS.  */

static void
free_v_must_defs (v_must_def_optype *v_must_defs)
{
  if (*v_must_defs)
    {
      ggc_free (*v_must_defs);
      *v_must_defs = NULL;
    }
}
/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  VARRAY_TREE_PTR_INIT (build_defs, 5, "build defs");
  VARRAY_TREE_PTR_INIT (build_uses, 10, "build uses");
  VARRAY_TREE_INIT (build_v_may_defs, 10, "build v_may_defs");
  VARRAY_TREE_INIT (build_vuses, 10, "build vuses");
  VARRAY_TREE_INIT (build_v_must_defs, 10, "build v_must_defs");
}
/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  ggc_free (build_defs);
  ggc_free (build_uses);
  ggc_free (build_v_may_defs);
  ggc_free (build_vuses);
  ggc_free (build_v_must_defs);
  build_defs = NULL;
  build_uses = NULL;
  build_v_may_defs = NULL;
  build_vuses = NULL;
  build_v_must_defs = NULL;
}
/* All the finalize_ssa_* routines do the work required to turn the build_
   VARRAY into an operand_vector of the appropriate type.  The original
   vector, if any, is passed in for comparison and virtual SSA_NAME reuse.
   If the old vector is reused, the pointer passed in is set to NULL so that
   the memory is not freed when the old operands are freed.  */
/* Return a new def operand vector for STMT, comparing to OLD_OPS_P.  */

static def_optype
finalize_ssa_defs (def_optype *old_ops_p, tree stmt ATTRIBUTE_UNUSED)
{
  unsigned num, x;
  def_optype def_ops, old_ops;
  bool build_diff;

  num = VARRAY_ACTIVE_SIZE (build_defs);
  if (num == 0)
    return NULL;

#ifdef ENABLE_CHECKING
  /* There should only be a single real definition per assignment.  */
  if (TREE_CODE (stmt) == MODIFY_EXPR && num > 1)
    abort ();
#endif

  old_ops = *old_ops_p;

  /* Compare old vector and new array.  */
  build_diff = true;
  if (old_ops && old_ops->num_defs == num)
    {
      build_diff = false;
      for (x = 0; x < num; x++)
	if (old_ops->defs[x].def != VARRAY_TREE_PTR (build_defs, x))
	  {
	    build_diff = true;
	    break;
	  }
    }

  if (!build_diff)
    {
      def_ops = old_ops;
      *old_ops_p = NULL;
    }
  else
    {
      def_ops = allocate_def_optype (num);
      for (x = 0; x < num; x++)
	def_ops->defs[x].def = VARRAY_TREE_PTR (build_defs, x);
    }

  VARRAY_POP_ALL (build_defs);

  return def_ops;
}
/* Return a new use operand vector for STMT, comparing to OLD_OPS_P.  */

static use_optype
finalize_ssa_uses (use_optype *old_ops_p, tree stmt ATTRIBUTE_UNUSED)
{
  unsigned num, x;
  use_optype use_ops, old_ops;
  bool build_diff;

  num = VARRAY_ACTIVE_SIZE (build_uses);
  if (num == 0)
    return NULL;

#ifdef ENABLE_CHECKING
  {
    unsigned x;
    /* If the pointer to the operand is the statement itself, something is
       wrong.  It means that we are pointing to a local variable (the
       initial call to get_stmt_operands does not pass a pointer to a
       local variable).  */
    for (x = 0; x < num; x++)
      if (*(VARRAY_TREE_PTR (build_uses, x)) == stmt)
	abort ();
  }
#endif

  old_ops = *old_ops_p;

  /* Check if the old vector and the new array are the same.  */
  build_diff = true;
  if (old_ops && old_ops->num_uses == num)
    {
      build_diff = false;
      for (x = 0; x < num; x++)
	if (old_ops->uses[x].use != VARRAY_TREE_PTR (build_uses, x))
	  {
	    build_diff = true;
	    break;
	  }
    }

  if (!build_diff)
    {
      use_ops = old_ops;
      *old_ops_p = NULL;
    }
  else
    {
      use_ops = allocate_use_optype (num);
      for (x = 0; x < num; x++)
	use_ops->uses[x].use = VARRAY_TREE_PTR (build_uses, x);
    }

  VARRAY_POP_ALL (build_uses);

  return use_ops;
}
/* Return a new v_may_def operand vector for STMT, comparing to OLD_OPS_P.  */

static v_may_def_optype
finalize_ssa_v_may_defs (v_may_def_optype *old_ops_p)
{
  unsigned num, x, i, old_num;
  v_may_def_optype v_may_def_ops, old_ops;
  tree result, var;
  bool build_diff;

  num = VARRAY_ACTIVE_SIZE (build_v_may_defs);
  if (num == 0)
    return NULL;

  old_ops = *old_ops_p;

  /* Check if the old vector and the new array are the same.  */
  build_diff = true;
  if (old_ops && old_ops->num_v_may_defs == num)
    {
      old_num = num;
      build_diff = false;
      for (x = 0; x < num; x++)
	{
	  var = old_ops->v_may_defs[x].def;
	  if (TREE_CODE (var) == SSA_NAME)
	    var = SSA_NAME_VAR (var);
	  if (var != VARRAY_TREE (build_v_may_defs, x))
	    {
	      build_diff = true;
	      break;
	    }
	}
    }
  else
    old_num = (old_ops ? old_ops->num_v_may_defs : 0);

  if (!build_diff)
    {
      v_may_def_ops = old_ops;
      *old_ops_p = NULL;
    }
  else
    {
      v_may_def_ops = allocate_v_may_def_optype (num);
      for (x = 0; x < num; x++)
	{
	  var = VARRAY_TREE (build_v_may_defs, x);
	  /* Look for VAR in the old operands vector.  */
	  for (i = 0; i < old_num; i++)
	    {
	      result = old_ops->v_may_defs[i].def;
	      if (TREE_CODE (result) == SSA_NAME)
		result = SSA_NAME_VAR (result);
	      if (result == var)
		{
		  v_may_def_ops->v_may_defs[x] = old_ops->v_may_defs[i];
		  break;
		}
	    }
	  if (i == old_num)
	    {
	      v_may_def_ops->v_may_defs[x].def = var;
	      v_may_def_ops->v_may_defs[x].use = var;
	    }
	}
    }

  /* Empty the V_MAY_DEF build vector after VUSES have been processed.  */

  return v_may_def_ops;
}
/* Return a new vuse operand vector, comparing to OLD_OPS_P.  */

static vuse_optype
finalize_ssa_vuses (vuse_optype *old_ops_p)
{
  unsigned num, x, i, num_v_may_defs, old_num;
  vuse_optype vuse_ops, old_ops;
  bool build_diff;

  num = VARRAY_ACTIVE_SIZE (build_vuses);
  if (num == 0)
    {
      VARRAY_POP_ALL (build_v_may_defs);
      return NULL;
    }

  /* Remove superfluous VUSE operands.  If the statement already has a
     V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
     needed because V_MAY_DEFs imply a VUSE of the variable.  For instance,
     suppose that variable 'a' is aliased:

	      # VUSE <a_2>
	      # a_3 = V_MAY_DEF <a_2>
	      a = a + 1;

     The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
     operation.  */

  num_v_may_defs = VARRAY_ACTIVE_SIZE (build_v_may_defs);

  if (num_v_may_defs > 0)
    {
      size_t i, j;
      tree vuse;
      for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
	{
	  vuse = VARRAY_TREE (build_vuses, i);
	  for (j = 0; j < num_v_may_defs; j++)
	    {
	      if (vuse == VARRAY_TREE (build_v_may_defs, j))
		break;
	    }

	  /* If we found a useless VUSE operand, remove it from the
	     operand array by replacing it with the last active element
	     in the operand array (unless the useless VUSE was the
	     last operand, in which case we simply remove it).  */
	  if (j != num_v_may_defs)
	    {
	      if (i != VARRAY_ACTIVE_SIZE (build_vuses) - 1)
		{
		  VARRAY_TREE (build_vuses, i)
		    = VARRAY_TREE (build_vuses,
				   VARRAY_ACTIVE_SIZE (build_vuses) - 1);
		}
	      VARRAY_POP (build_vuses);

	      /* We want to rescan the element at this index, unless
		 this was the last element, in which case the loop
		 terminates.  */
	      i--;
	    }
	}
    }

  num = VARRAY_ACTIVE_SIZE (build_vuses);
  /* We could have reduced the size to zero now, however.  */
  if (num == 0)
    {
      VARRAY_POP_ALL (build_v_may_defs);
      return NULL;
    }

  old_ops = *old_ops_p;

  /* Determine whether vuses is the same as the old vector.  */
  build_diff = true;
  if (old_ops && old_ops->num_vuses == num)
    {
      old_num = num;
      build_diff = false;
      for (x = 0; x < num; x++)
	{
	  tree v;
	  v = old_ops->vuses[x];
	  if (TREE_CODE (v) == SSA_NAME)
	    v = SSA_NAME_VAR (v);
	  if (v != VARRAY_TREE (build_vuses, x))
	    {
	      build_diff = true;
	      break;
	    }
	}
    }
  else
    old_num = (old_ops ? old_ops->num_vuses : 0);

  if (!build_diff)
    {
      vuse_ops = old_ops;
      *old_ops_p = NULL;
    }
  else
    {
      vuse_ops = allocate_vuse_optype (num);
      for (x = 0; x < num; x++)
	{
	  tree result, var = VARRAY_TREE (build_vuses, x);
	  /* Look for VAR in the old vector, and use that SSA_NAME.  */
	  for (i = 0; i < old_num; i++)
	    {
	      result = old_ops->vuses[i];
	      if (TREE_CODE (result) == SSA_NAME)
		result = SSA_NAME_VAR (result);
	      if (result == var)
		{
		  vuse_ops->vuses[x] = old_ops->vuses[i];
		  break;
		}
	    }
	  if (i == old_num)
	    vuse_ops->vuses[x] = var;
	}
    }

  /* The v_may_def build vector wasn't freed because we needed it here.
     Free it now with the vuses build vector.  */
  VARRAY_POP_ALL (build_vuses);
  VARRAY_POP_ALL (build_v_may_defs);

  return vuse_ops;
}
/* Return a new v_must_def operand vector for STMT, comparing to OLD_OPS_P.  */

static v_must_def_optype
finalize_ssa_v_must_defs (v_must_def_optype *old_ops_p,
			  tree stmt ATTRIBUTE_UNUSED)
{
  unsigned num, x, i, old_num = 0;
  v_must_def_optype v_must_def_ops, old_ops;
  bool build_diff;

  num = VARRAY_ACTIVE_SIZE (build_v_must_defs);
  if (num == 0)
    return NULL;

#ifdef ENABLE_CHECKING
  /* There should only be a single V_MUST_DEF per assignment.  */
  if (TREE_CODE (stmt) == MODIFY_EXPR && num > 1)
    abort ();
#endif

  old_ops = *old_ops_p;

  /* Check if the old vector and the new array are the same.  */
  build_diff = true;
  if (old_ops && old_ops->num_v_must_defs == num)
    {
      old_num = num;
      build_diff = false;
      for (x = 0; x < num; x++)
	{
	  tree var = old_ops->v_must_defs[x];
	  if (TREE_CODE (var) == SSA_NAME)
	    var = SSA_NAME_VAR (var);
	  if (var != VARRAY_TREE (build_v_must_defs, x))
	    {
	      build_diff = true;
	      break;
	    }
	}
    }
  else
    old_num = (old_ops ? old_ops->num_v_must_defs : 0);

  if (!build_diff)
    {
      v_must_def_ops = old_ops;
      *old_ops_p = NULL;
    }
  else
    {
      v_must_def_ops = allocate_v_must_def_optype (num);
      for (x = 0; x < num; x++)
	{
	  tree result, var = VARRAY_TREE (build_v_must_defs, x);
	  /* Look for VAR in the original vector.  */
	  for (i = 0; i < old_num; i++)
	    {
	      result = old_ops->v_must_defs[i];
	      if (TREE_CODE (result) == SSA_NAME)
		result = SSA_NAME_VAR (result);
	      if (result == var)
		{
		  v_must_def_ops->v_must_defs[x] = old_ops->v_must_defs[i];
		  break;
		}
	    }
	  if (i == old_num)
	    v_must_def_ops->v_must_defs[x] = var;
	}
    }

  VARRAY_POP_ALL (build_v_must_defs);

  return v_must_def_ops;
}
/* Finalize all the build vectors, fill the new ones into INFO.  */

static void
finalize_ssa_stmt_operands (tree stmt, stmt_operands_p old_ops,
			    stmt_operands_p new_ops)
{
  new_ops->def_ops = finalize_ssa_defs (&(old_ops->def_ops), stmt);
  new_ops->use_ops = finalize_ssa_uses (&(old_ops->use_ops), stmt);
  new_ops->v_must_def_ops
    = finalize_ssa_v_must_defs (&(old_ops->v_must_def_ops), stmt);
  new_ops->v_may_def_ops = finalize_ssa_v_may_defs (&(old_ops->v_may_def_ops));
  new_ops->vuse_ops = finalize_ssa_vuses (&(old_ops->vuse_ops));
}
/* Start the process of building up operands vectors in INFO.  */

static void
start_ssa_stmt_operands (void)
{
#ifdef ENABLE_CHECKING
  if (VARRAY_ACTIVE_SIZE (build_defs) > 0
      || VARRAY_ACTIVE_SIZE (build_uses) > 0
      || VARRAY_ACTIVE_SIZE (build_vuses) > 0
      || VARRAY_ACTIVE_SIZE (build_v_may_defs) > 0
      || VARRAY_ACTIVE_SIZE (build_v_must_defs) > 0)
    abort ();
#endif
}
/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  VARRAY_PUSH_TREE_PTR (build_defs, def_p);
}

/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  VARRAY_PUSH_TREE_PTR (build_uses, use_p);
}
/* Add a new virtual may def for variable VAR to the build array.  */

static void
append_v_may_def (tree var)
{
  size_t i;

  /* Don't allow duplicate entries.  */
  for (i = 0; i < VARRAY_ACTIVE_SIZE (build_v_may_defs); i++)
    if (var == VARRAY_TREE (build_v_may_defs, i))
      return;

  VARRAY_PUSH_TREE (build_v_may_defs, var);
}
/* Add VAR to the list of virtual uses.  */

static void
append_vuse (tree var)
{
  size_t i;

  /* Don't allow duplicate entries.  */
  for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
    if (var == VARRAY_TREE (build_vuses, i))
      return;

  VARRAY_PUSH_TREE (build_vuses, var);
}
/* Add VAR to the list of virtual must definitions for INFO.  */

static void
append_v_must_def (tree var)
{
  size_t i;

  /* Don't allow duplicate entries.  */
  for (i = 0; i < VARRAY_ACTIVE_SIZE (build_v_must_defs); i++)
    if (var == VARRAY_TREE (build_v_must_defs, i))
      return;

  VARRAY_PUSH_TREE (build_v_must_defs, var);
}
/* Create an operands cache for STMT, returning it in NEW_OPS.  OLD_OPS are
   the original operands, and if ANN is non-null, appropriate stmt flags are
   set in the stmt's annotation.  Note that some fields in old_ops may
   change to NULL, although none of the memory they originally pointed to
   will be destroyed.  It is appropriate to call free_ssa_operands() on
   the value returned in old_ops.

   The rationale for this: Certain optimizations wish to examine the
   difference between new_ops and old_ops after processing.  If a set of
   operands doesn't change, new_ops will simply assume the pointer in
   old_ops, and the old_ops pointer will be set to NULL, indicating that no
   memory needs to be cleared.  Usage might appear something like:

       old_ops_copy = old_ops = stmt_ann (stmt)->operands;
       build_ssa_operands (stmt, NULL, &old_ops, &new_ops);
          <* compare old_ops_copy and new_ops *>
       free_ssa_operands (old_ops);  */
static void
build_ssa_operands (tree stmt, stmt_ann_t ann, stmt_operands_p old_ops,
		    stmt_operands_p new_ops)
{
  enum tree_code code;
  tree_ann_t saved_ann = stmt->common.ann;

  /* Replace stmt's annotation with the one passed in for the duration
     of the operand building process.  This allows "fake" stmts to be built
     and not be included in other data structures which can be built here.  */
  stmt->common.ann = (tree_ann_t) ann;

  /* Initially assume that the statement has no volatile operands, nor
     makes aliased loads or stores.  */
  if (ann)
    {
      ann->has_volatile_ops = false;
      ann->makes_aliased_stores = false;
      ann->makes_aliased_loads = false;
    }

  start_ssa_stmt_operands ();

  code = TREE_CODE (stmt);
  switch (code)
    {
    case MODIFY_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);
      if (TREE_CODE (TREE_OPERAND (stmt, 0)) == ARRAY_REF
	  || TREE_CODE (TREE_OPERAND (stmt, 0)) == ARRAY_RANGE_REF
	  || TREE_CODE (TREE_OPERAND (stmt, 0)) == COMPONENT_REF
	  || TREE_CODE (TREE_OPERAND (stmt, 0)) == REALPART_EXPR
	  || TREE_CODE (TREE_OPERAND (stmt, 0)) == IMAGPART_EXPR
	  /* Use a V_MAY_DEF if the RHS might throw, as the LHS won't be
	     modified in that case.  FIXME we should represent somehow
	     that it is killed on the fallthrough path.  */
	  || tree_could_throw_p (TREE_OPERAND (stmt, 1)))
	get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_is_def);
      else
	get_expr_operands (stmt, &TREE_OPERAND (stmt, 0),
			   opf_is_def | opf_kill_def);
      break;

    case COND_EXPR:
      get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
      break;

    case SWITCH_EXPR:
      get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
      break;

    case ASM_EXPR:
      get_asm_expr_operands (stmt);
      break;

    case RETURN_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
      break;

    case GOTO_EXPR:
      get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
      break;

    case LABEL_EXPR:
      get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
      break;

      /* These nodes contain no variable references.  */
    case BIND_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case RESX_EXPR:
      break;

    default:
      /* Notice that if get_expr_operands tries to use &STMT as the operand
	 pointer (which may only happen for USE operands), we will abort in
	 append_use.  This default will handle statements like empty
	 statements, or CALL_EXPRs that may appear on the RHS of a statement
	 or as statements themselves.  */
      get_expr_operands (stmt, &stmt, opf_none);
      break;
    }

  finalize_ssa_stmt_operands (stmt, old_ops, new_ops);
  stmt->common.ann = saved_ann;
}
/* Free any operands vectors in OPS.  */

void
free_ssa_operands (stmt_operands_p ops)
{
  if (ops->def_ops)
    free_defs (&(ops->def_ops));
  if (ops->use_ops)
    free_uses (&(ops->use_ops));
  if (ops->vuse_ops)
    free_vuses (&(ops->vuse_ops));
  if (ops->v_may_def_ops)
    free_v_may_defs (&(ops->v_may_def_ops));
  if (ops->v_must_def_ops)
    free_v_must_defs (&(ops->v_must_def_ops));
}
/* Get the operands of statement STMT.  Note that repeated calls to
   get_stmt_operands for the same statement will do nothing until the
   statement is marked modified by a call to modify_stmt().  */

void
get_stmt_operands (tree stmt)
{
  stmt_ann_t ann;
  stmt_operands_t old_operands;

#if defined ENABLE_CHECKING
  /* The optimizers cannot handle statements that are nothing but a
     _DECL.  This indicates a bug in the gimplifier.  */
  if (SSA_VAR_P (stmt))
    abort ();
#endif

  /* Ignore error statements.  */
  if (TREE_CODE (stmt) == ERROR_MARK)
    return;

  ann = get_stmt_ann (stmt);

  /* If the statement has not been modified, the operands are still valid.  */
  if (!ann->modified)
    return;

  timevar_push (TV_TREE_OPS);

  old_operands = ann->operands;
  memset (&(ann->operands), 0, sizeof (stmt_operands_t));

  build_ssa_operands (stmt, ann, &old_operands, &(ann->operands));
  free_ssa_operands (&old_operands);

  /* Clear the modified bit for STMT.  Subsequent calls to
     get_stmt_operands for this statement will do nothing until the
     statement is marked modified by a call to modify_stmt().  */
  ann->modified = 0;

  timevar_pop (TV_TREE_OPS);
}
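
/* Sketch of the expected caller pattern (hypothetical `new_rhs'): a pass
   that rewrites a statement marks it modified, and the next call to
   get_stmt_operands rebuilds the cache.

	TREE_OPERAND (stmt, 1) = new_rhs;
	modify_stmt (stmt);
	get_stmt_operands (stmt);
*/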
/* Recursively scan the expression pointed to by EXPR_P in statement referred
   to by INFO.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  */

static void
get_expr_operands (tree stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  char class;
  tree expr = *expr_p;

  if (expr == NULL || expr == error_mark_node)
    return;

  code = TREE_CODE (expr);
  class = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* We could have the address of a component, array member,
	 etc which has interesting variable references.  */
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the stmt takes its address will
	 be of interest to some passes (e.g. alias resolution).  */
      add_stmt_operand (expr_p, stmt, 0);

      /* If the address is invariant, there may be no interesting variable
	 references inside.  */
      if (is_gimple_min_invariant (expr))
	return;

      /* There should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find here
	 are ARRAY_REF indices which will always be real operands (GIMPLE
	 does not allow non-registers as array indices).  */
      flags |= opf_no_vops;

      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;
    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* If we found a variable, add it to DEFS or USES depending
	 on the operand flags.  */
      add_stmt_operand (expr_p, stmt, flags);
      return;

    case INDIRECT_REF:
      get_indirect_ref_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Treat array references as references to the virtual variable
	 representing the array.  The virtual variable for an ARRAY_REF
	 is the VAR_DECL for the array.  */

      /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
	 according to the value of IS_DEF.  Recurse if the LHS of the
	 ARRAY_REF node is not a regular variable.  */
      if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
	add_stmt_operand (expr_p, stmt, flags);
      else
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
      return;
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* Similarly to arrays, references to compound variables (complex
	 types and structures/unions) are globbed.

	 FIXME: This means that

			a.x = 6;
			a.y = 7;
			foo (a.x, a.y);

	 will not be constant propagated because the two partial
	 definitions to 'a' will kill each other.  Note that SRA may be
	 able to fix this problem if 'a' can be scalarized.  */

      /* If the LHS of the compound reference is not a regular variable,
	 recurse to keep looking for more operands in the subexpression.  */
      if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
	add_stmt_operand (expr_p, stmt, flags);
      else
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

      if (code == COMPONENT_REF)
	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      return;

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case CALL_EXPR:
      get_call_expr_operands (stmt, expr);
      return;

    case MODIFY_EXPR:
      {
	int subflags;
	tree op;

	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);

	op = TREE_OPERAND (expr, 0);
	if (TREE_CODE (op) == WITH_SIZE_EXPR)
	  op = TREE_OPERAND (op, 0);
	if (TREE_CODE (op) == ARRAY_REF
	    || TREE_CODE (op) == ARRAY_RANGE_REF
	    || TREE_CODE (op) == COMPONENT_REF
	    || TREE_CODE (op) == REALPART_EXPR
	    || TREE_CODE (op) == IMAGPART_EXPR)
	  subflags = opf_is_def;
	else
	  subflags = opf_is_def | opf_kill_def;

	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags);
	return;
      }

    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
	tree t;
	for (t = TREE_OPERAND (expr, 0); t; t = TREE_CHAIN (t))
	  get_expr_operands (stmt, &TREE_VALUE (t), opf_none);
	return;
      }
    case TRUTH_NOT_EXPR:
    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case COMPOUND_EXPR:
    do_binary:
      {
	tree op0 = TREE_OPERAND (expr, 0);
	tree op1 = TREE_OPERAND (expr, 1);

	/* If it would be profitable to swap the operands, then do so to
	   canonicalize the statement, enabling better optimization.

	   By placing canonicalization of such expressions here we
	   transparently keep statements in canonical form, even
	   when the statement is modified.  */
	if (tree_swap_operands_p (op0, op1, false))
	  {
	    /* For relationals we need to swap the operands
	       and change the code.  */
	    if (code == LT_EXPR
		|| code == GT_EXPR
		|| code == LE_EXPR
		|| code == GE_EXPR)
	      {
		TREE_SET_CODE (expr, swap_tree_comparison (code));
		TREE_OPERAND (expr, 0) = op1;
		TREE_OPERAND (expr, 1) = op0;
	      }
	    /* For a commutative operator we can just swap the operands.  */
	    else if (commutative_tree_code (code))
	      {
		TREE_OPERAND (expr, 0) = op1;
		TREE_OPERAND (expr, 1) = op0;
	      }
	  }

	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }

      /* Expressions that make no memory references.  */
    case BLOCK:
    case FUNCTION_DECL:
    case EXC_PTR_EXPR:
    case FILTER_EXPR:
    case LABEL_DECL:
      return;

    default:
      if (class == '1')
	goto do_unary;
      if (class == '2' || class == '<')
	goto do_binary;
      if (class == 'c' || class == 't')
	return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
  abort ();
#endif
}
/* Scan operands in the ASM_EXPR stmt referred to in INFO.  */

static void
get_asm_expr_operands (tree stmt)
{
  stmt_ann_t s_ann = stmt_ann (stmt);
  int noutputs = list_length (ASM_OUTPUTS (stmt));
  const char **oconstraints
    = (const char **) alloca ((noutputs) * sizeof (const char *));
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  for (i = 0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
    {
      oconstraints[i] = constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_output_constraint (&constraint, i, 0, 0,
			       &allows_mem, &allows_reg, &is_inout);

#if defined ENABLE_CHECKING
      /* This should have been split in gimplify_asm_expr.  */
      if (allows_reg && is_inout)
	abort ();
#endif

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t))
	    note_addressable (t, s_ann);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
    }

  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
    {
      constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t))
	    note_addressable (t, s_ann);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), 0);
    }

  /* Clobber memory for asm ("" : : : "memory");  */
  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
    if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
      {
	size_t i;

	/* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
	   decided to group them).  */
	if (global_var)
	  add_stmt_operand (&global_var, stmt, opf_is_def);
	else
	  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
	    {
	      tree var = referenced_var (i);
	      add_stmt_operand (&var, stmt, opf_is_def);
	    });

	/* Now clobber all addressables.  */
	EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i,
	  {
	    tree var = referenced_var (i);
	    add_stmt_operand (&var, stmt, opf_is_def);
	  });

	break;
      }
}
/* A subroutine of get_expr_operands to handle INDIRECT_REF.  */

static void
get_indirect_ref_operands (tree stmt, tree expr, int flags)
{
  tree *pptr = &TREE_OPERAND (expr, 0);
  tree ptr = *pptr;
  stmt_ann_t ann = stmt_ann (stmt);

  /* Stores into INDIRECT_REF operands are never killing definitions.  */
  flags &= ~opf_kill_def;

  if (SSA_VAR_P (ptr))
    {
      struct ptr_info_def *pi = NULL;

      /* If PTR has flow-sensitive points-to information, use it.  */
      if (TREE_CODE (ptr) == SSA_NAME
	  && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
	  && pi->name_mem_tag)
	{
	  /* PTR has its own memory tag.  Use it.  */
	  add_stmt_operand (&pi->name_mem_tag, stmt, flags);
	}
      else
	{
	  /* If PTR is not an SSA_NAME or it doesn't have a name
	     tag, use its type memory tag.  */
	  var_ann_t ann;

	  /* If we are emitting debugging dumps, display a warning if
	     PTR is an SSA_NAME with no flow-sensitive alias
	     information.  That means that we may need to compute
	     aliasing information again.  */
	  if (dump_file
	      && TREE_CODE (ptr) == SSA_NAME
	      && pi == NULL)
	    {
	      fprintf (dump_file,
		       "NOTE: no flow-sensitive alias info for ");
	      print_generic_expr (dump_file, ptr, dump_flags);
	      fprintf (dump_file, " in ");
	      print_generic_stmt (dump_file, stmt, dump_flags);
	    }

	  if (TREE_CODE (ptr) == SSA_NAME)
	    ptr = SSA_NAME_VAR (ptr);
	  ann = var_ann (ptr);
	  if (ann->type_mem_tag)
	    add_stmt_operand (&ann->type_mem_tag, stmt, flags);
	}
    }

  /* If a constant is used as a pointer, we can't generate a real
     operand for it but we mark the statement volatile to prevent
     optimizations from messing things up.  */
  else if (TREE_CODE (ptr) == INTEGER_CST)
    {
      if (ann)
	ann->has_volatile_ops = true;
      return;
    }

  /* Everything else *should* have been folded elsewhere, but users
     are smarter than we in finding ways to write invalid code.  We
     cannot just abort here.  If we were absolutely certain that we
     do handle all valid cases, then we could just do nothing here.
     That seems optimistic, so attempt to do something logical... */
  else if ((TREE_CODE (ptr) == PLUS_EXPR || TREE_CODE (ptr) == MINUS_EXPR)
	   && TREE_CODE (TREE_OPERAND (ptr, 0)) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (ptr, 1)) == INTEGER_CST)
    {
      /* Make sure we know the object is addressable.  */
      pptr = &TREE_OPERAND (ptr, 0);
      add_stmt_operand (pptr, stmt, 0);

      /* Mark the object itself with a VUSE.  */
      pptr = &TREE_OPERAND (*pptr, 0);
      get_expr_operands (stmt, pptr, flags);
      return;
    }

  /* Ok, this isn't even is_gimple_min_invariant.  Something's broken.  */
  else
    abort ();

  /* Add a USE operand for the base pointer.  */
  get_expr_operands (stmt, pptr, opf_none);
}
/* A subroutine of get_expr_operands to handle CALL_EXPR.  */

static void
get_call_expr_operands (tree stmt, tree expr)
{
  tree op;
  int call_flags = call_expr_flags (expr);

  /* Find uses in the called function.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);

  for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
    get_expr_operands (stmt, &TREE_VALUE (op), opf_none);

  get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);

  if (bitmap_first_set_bit (call_clobbered_vars) >= 0)
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
	 A 'noreturn' function might, but since we don't return anyway
	 there is no point in recording that.  */
      if (TREE_SIDE_EFFECTS (expr)
	  && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
	add_call_clobber_ops (stmt);
      else if (!(call_flags & ECF_CONST))
	add_call_read_ops (stmt);
    }
}
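
/* Illustration of the distinction above (sketch, hypothetical callees):

	t = const_fn (a);	ECF_CONST: no clobbers, no call-read VUSEs
	t = pure_fn (a);	ECF_PURE: VUSEs of call-clobbered variables
	t = fn (a);		neither: V_MAY_DEFs of call-clobbered variables
*/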
/* Add *VAR_P to the appropriate operand array for INFO.  FLAGS is as in
   get_expr_operands.  If *VAR_P is a GIMPLE register, it will be added to
   the statement's real operands, otherwise it is added to virtual
   operands.  */

static void
add_stmt_operand (tree *var_p, tree stmt, int flags)
{
  bool is_real_op;
  tree var, sym;
  stmt_ann_t s_ann = stmt_ann (stmt);
  var_ann_t v_ann;

  var = *var_p;
  STRIP_NOPS (var);

  /* If the operand is an ADDR_EXPR, add its operand to the list of
     variables that have had their address taken in this statement.  */
  if (TREE_CODE (var) == ADDR_EXPR)
    {
      note_addressable (TREE_OPERAND (var, 0), s_ann);
      return;
    }

  /* If the original variable is not a scalar, it will be added to the list
     of virtual operands.  In that case, use its base symbol as the virtual
     variable representing it.  */
  is_real_op = is_gimple_reg (var);
  if (!is_real_op && !DECL_P (var))
    var = get_virtual_var (var);

  /* If VAR is not a variable that we care to optimize, do nothing.  */
  if (var == NULL_TREE || !SSA_VAR_P (var))
    return;

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Don't expose volatile variables to the optimizers.  */
  if (TREE_THIS_VOLATILE (sym))
    {
      if (s_ann)
	s_ann->has_volatile_ops = true;
      return;
    }

  if (is_real_op)
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_is_def)
	append_def (var_p);
      else
	append_use (var_p);
    }
  else
    {
      varray_type aliases;

      /* The variable is not a GIMPLE register.  Add it (or its aliases) to
	 virtual operands, unless the caller has specifically requested
	 not to add virtual operands (used when adding operands inside an
	 ADDR_EXPR expression).  */
      if (flags & opf_no_vops)
	return;

      aliases = v_ann->may_aliases;

      if (aliases == NULL)
	{
	  /* The variable is not aliased or it is an alias tag.  */
	  if (flags & opf_is_def)
	    {
	      if (v_ann->is_alias_tag)
		{
		  /* Alias tagged vars get V_MAY_DEF to avoid breaking
		     def-def chains with the other variables in their
		     alias sets.  */
		  if (s_ann)
		    s_ann->makes_aliased_stores = 1;
		  append_v_may_def (var);
		}
	      else if (flags & opf_kill_def)
		{
#if defined ENABLE_CHECKING
		  /* Only regular variables may get a V_MUST_DEF
		     here.  */
		  if (v_ann->mem_tag_kind != NOT_A_TAG)
		    abort ();
#endif
		  /* V_MUST_DEF for non-aliased, non-GIMPLE register
		     variable definitions.  */
		  append_v_must_def (var);
		}
	      else
		{
		  /* Add a V_MAY_DEF for call-clobbered variables and
		     memory tags.  */
		  append_v_may_def (var);
		}
	    }
	  else
	    {
	      append_vuse (var);
	      if (s_ann && v_ann->is_alias_tag)
		s_ann->makes_aliased_loads = 1;
	    }
	}
      else
	{
	  size_t i;

	  /* The variable is aliased.  Add its aliases to the virtual
	     operands.  */
#if defined ENABLE_CHECKING
	  if (VARRAY_ACTIVE_SIZE (aliases) == 0)
	    abort ();
#endif

	  if (flags & opf_is_def)
	    {
	      /* If the variable is also an alias tag, add a virtual
		 operand for it, otherwise we will miss representing
		 references to the members of the variable's alias set.
		 This fixes the bug in gcc.c-torture/execute/20020503-1.c.  */
	      if (v_ann->is_alias_tag)
		append_v_may_def (var);

	      for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
		append_v_may_def (VARRAY_TREE (aliases, i));

	      if (s_ann)
		s_ann->makes_aliased_stores = 1;
	    }
	  else
	    {
	      /* Similarly, append a virtual use for VAR itself, when
		 it is an alias tag.  */
	      if (v_ann->is_alias_tag)
		append_vuse (var);

	      for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
		append_vuse (VARRAY_TREE (aliases, i));

	      if (s_ann)
		s_ann->makes_aliased_loads = 1;
	    }
	}
    }
}
/* Record that VAR had its address taken in the statement with annotations
   S_ANN.  */

static void
note_addressable (tree var, stmt_ann_t s_ann)
{
  if (!s_ann)
    return;

  var = get_base_address (var);
  if (var && SSA_VAR_P (var))
    {
      if (s_ann->addresses_taken == NULL)
	s_ann->addresses_taken = BITMAP_GGC_ALLOC ();
      bitmap_set_bit (s_ann->addresses_taken, var_ann (var)->uid);
    }
}
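
/* Example: for `p = &a.b', get_base_address yields 'a', so the bit for
   'a' (its var annotation uid) is set in the statement's addresses_taken
   bitmap.  */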
/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
   clobbered variables in the function.  */

static void
add_call_clobber_ops (tree stmt)
{
  /* Functions that are not const, pure or never return may clobber
     call-clobbered variables.  */
  if (stmt_ann (stmt))
    stmt_ann (stmt)->makes_clobbering_call = true;

  /* If we had created .GLOBAL_VAR earlier, use it.  Otherwise, add
     a V_MAY_DEF operand for every call clobbered variable.  See
     compute_may_aliases for the heuristic used to decide whether
     to create .GLOBAL_VAR or not.  */
  if (global_var)
    add_stmt_operand (&global_var, stmt, opf_is_def);
  else
    {
      size_t i;

      EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
	{
	  tree var = referenced_var (i);

	  /* If VAR is read-only, don't add a V_MAY_DEF, just a
	     VUSE operand.  */
	  if (!TREE_READONLY (var))
	    add_stmt_operand (&var, stmt, opf_is_def);
	  else
	    add_stmt_operand (&var, stmt, opf_none);
	});
    }
}
/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
   function.  */

static void
add_call_read_ops (tree stmt)
{
  /* Otherwise, if the function is not pure, it may reference memory.  Add
     a VUSE for .GLOBAL_VAR if it has been created.  Otherwise, add a VUSE
     for each call-clobbered variable.  See add_referenced_var for the
     heuristic used to decide whether to create .GLOBAL_VAR.  */
  if (global_var)
    add_stmt_operand (&global_var, stmt, opf_none);
  else
    {
      size_t i;

      EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
	{
	  tree var = referenced_var (i);
	  add_stmt_operand (&var, stmt, opf_none);
	});
    }
}
/* Copies virtual operands from SRC to DST.  */

void
copy_virtual_operands (tree dst, tree src)
{
  unsigned i;
  vuse_optype vuses = STMT_VUSE_OPS (src);
  v_may_def_optype v_may_defs = STMT_V_MAY_DEF_OPS (src);
  v_must_def_optype v_must_defs = STMT_V_MUST_DEF_OPS (src);
  vuse_optype *vuses_new = &stmt_ann (dst)->operands.vuse_ops;
  v_may_def_optype *v_may_defs_new = &stmt_ann (dst)->operands.v_may_def_ops;
  v_must_def_optype *v_must_defs_new = &stmt_ann (dst)->operands.v_must_def_ops;

  if (vuses)
    {
      *vuses_new = allocate_vuse_optype (NUM_VUSES (vuses));
      for (i = 0; i < NUM_VUSES (vuses); i++)
	SET_VUSE_OP (*vuses_new, i, VUSE_OP (vuses, i));
    }

  if (v_may_defs)
    {
      *v_may_defs_new = allocate_v_may_def_optype (NUM_V_MAY_DEFS (v_may_defs));
      for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
	{
	  SET_V_MAY_DEF_OP (*v_may_defs_new, i, V_MAY_DEF_OP (v_may_defs, i));
	  SET_V_MAY_DEF_RESULT (*v_may_defs_new, i,
				V_MAY_DEF_RESULT (v_may_defs, i));
	}
    }

  if (v_must_defs)
    {
      *v_must_defs_new
	= allocate_v_must_def_optype (NUM_V_MUST_DEFS (v_must_defs));
      for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
	SET_V_MUST_DEF_OP (*v_must_defs_new, i, V_MUST_DEF_OP (v_must_defs, i));
    }
}
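
/* Usage sketch (hypothetical `orig_stmt' and `copy'): after duplicating a
   statement, give the copy the same virtual operands as the original.
   Note that copy_virtual_operands writes through stmt_ann (dst), so DST
   must already have (or be given) a statement annotation:

	tree copy = unshare_expr (orig_stmt);
	get_stmt_ann (copy);
	copy_virtual_operands (copy, orig_stmt);
*/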
/* Specifically for use in DOM's expression analysis.  Given a store, we
   create an artificial stmt which looks like a load from the store, this can
   be used to eliminate redundant loads.  OLD_OPS are the operands from the
   store stmt, and NEW_STMT is the new load which represents a load of the
   stored value.  */

void
create_ssa_artficial_load_stmt (stmt_operands_p old_ops, tree new_stmt)
{
  stmt_ann_t ann;
  tree op;
  stmt_operands_t tmp;
  unsigned j;

  memset (&tmp, 0, sizeof (stmt_operands_t));
  ann = get_stmt_ann (new_stmt);

  /* Free operands just in case it was an existing stmt.  */
  free_ssa_operands (&(ann->operands));

  build_ssa_operands (new_stmt, NULL, &tmp, &(ann->operands));
  free_vuses (&(ann->operands.vuse_ops));
  free_v_may_defs (&(ann->operands.v_may_def_ops));
  free_v_must_defs (&(ann->operands.v_must_def_ops));

  /* For each VDEF on the original statement, we want to create a
     VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
     statement.  */
  for (j = 0; j < NUM_V_MAY_DEFS (old_ops->v_may_def_ops); j++)
    {
      op = V_MAY_DEF_RESULT (old_ops->v_may_def_ops, j);
      append_vuse (op);
    }

  for (j = 0; j < NUM_V_MUST_DEFS (old_ops->v_must_def_ops); j++)
    {
      op = V_MUST_DEF_OP (old_ops->v_must_def_ops, j);
      append_vuse (op);
    }

  /* Now set the vuses for this new stmt.  */
  ann->operands.vuse_ops = finalize_ssa_vuses (&(tmp.vuse_ops));
}
#include "gt-tree-ssa-operands.h"