1 /* SSA operands management for trees.
2 Copyright (C) 2003 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
23 #include "coretypes.h"
28 #include "diagnostic.h"
29 #include "tree-flow.h"
30 #include "tree-inline.h"
31 #include "tree-pass.h"
/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.
   The operand types are broken up into REAL and VIRTUAL operands.  The real
42 operands are represented as pointers into the stmt's operand tree. Thus
43 any manipulation of the real operands will be reflected in the actual tree.
44 Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may or may not occur in the stmt's tree.
46 Manipulation of the virtual operands will not be reflected in the stmt tree.
48 The routines in this file are concerned with creating this operand cache
   get_stmt_operands() is the primary entry point.
   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
55 interest, and calls are made to the append_* routines whenever one is
56 found. There are 5 of these routines, each representing one of the
   5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
   Virtual Must Defs.
60 The append_* routines check for duplication, and simply keep a list of
61 unique objects for each operand type in the build_* extendable vectors.
63 Once the stmt tree is completely parsed, the finalize_ssa_operands()
64 routine is called, which proceeds to perform the finalization routine
65 on each of the 5 operand vectors which have been built up.
67 If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
69 match, the old vector is simply reused. If it isn't a perfect match, then
70 a new vector is created and the new operands are placed there. For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the new operand cache, then
   the new cache vector will also get the same SSA_NAME.
   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
76 vector for VUSE, then the new vector will also be modified such that
77 it contains 'a_5' rather than 'a'.
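/* A hedged illustration of that reuse (the SSA version numbers below are
   made up): if a statement's old operand cache was

	# VUSE <a_5>
	x_3 = a + 1;

   and the statement is rewritten so that 'a' is still read, the rebuilt
   VUSE vector initially contains only the bare symbol 'a'; the finalize
   routine then matches it against the old vector and re-installs 'a_5',
   so the SSA web does not have to be re-renamed.  */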
82 /* Flags to describe operand properties in get_stmt_operands and helpers. */
/* By default, operands are loaded.  */
#define opf_none	0
87 /* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
89 #define opf_is_def (1 << 0)
91 /* Operand is the target of an assignment expression. */
92 #define opf_kill_def (1 << 1)
94 /* No virtual operands should be created in the expression. This is used
95 when traversing ADDR_EXPR nodes which have different semantics than
96 other expressions. Inside an ADDR_EXPR node, the only operands that we
97 need to consider are indices into arrays. For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
100 #define opf_no_vops (1 << 2)
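/* A hedged usage sketch (mirroring calls made further down in this file):
   the flags combine bitwise, so the LHS of a plain assignment is scanned
   with

	get_expr_operands (stmt, &TREE_OPERAND (stmt, 0),
			   opf_is_def | opf_kill_def);

   while sub-expressions of an ADDR_EXPR are scanned with opf_no_vops OR'd
   into the current flags so that no virtual operands are created.  */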
102 /* Array for building all the def operands. */
103 static GTY (()) varray_type build_defs;
105 /* Array for building all the use operands. */
106 static GTY (()) varray_type build_uses;
108 /* Array for building all the v_may_def operands. */
109 static GTY (()) varray_type build_v_may_defs;
111 /* Array for building all the vuse operands. */
112 static GTY (()) varray_type build_vuses;
114 /* Array for building all the v_must_def operands. */
115 static GTY (()) varray_type build_v_must_defs;
118 #ifdef ENABLE_CHECKING
119 /* Used to make sure operand construction is working on the proper stmt. */
120 tree check_build_stmt;
123 def_operand_p NULL_DEF_OPERAND_P = { NULL };
124 use_operand_p NULL_USE_OPERAND_P = { NULL };
126 static void note_addressable (tree, stmt_ann_t);
127 static void get_expr_operands (tree, tree *, int);
128 static void get_asm_expr_operands (tree);
129 static void get_indirect_ref_operands (tree, tree, int);
130 static void get_call_expr_operands (tree, tree);
131 static inline void append_def (tree *);
132 static inline void append_use (tree *);
133 static void append_v_may_def (tree);
134 static void append_v_must_def (tree);
135 static void add_call_clobber_ops (tree);
136 static void add_call_read_ops (tree);
137 static void add_stmt_operand (tree *, tree, int);
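/* The allocators below all use the same over-allocation idiom: each
   *_optype_d structure ends in a one-element array, so a vector with room
   for NUM operands is obtained by asking for sizeof (element) * (num - 1)
   extra bytes.  A minimal sketch of the idiom, with an illustrative struct
   name that does not exist in this file:

	struct opvec { unsigned num; tree slot[1]; };

	struct opvec *v = ggc_alloc (sizeof (struct opvec)
				     + sizeof (tree) * (num - 1));
	v->num = num;
	v->slot[num - 1] = last_element;  */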
139 /* Return a vector of contiguous memory for NUM def operands. */
141 static inline def_optype
142 allocate_def_optype (unsigned num)
146 size = sizeof (struct def_optype_d) + sizeof (tree *) * (num - 1);
147 def_ops = ggc_alloc (size);
148 def_ops->num_defs = num;
153 /* Return a vector of contiguous memory for NUM use operands. */
155 static inline use_optype
156 allocate_use_optype (unsigned num)
160 size = sizeof (struct use_optype_d) + sizeof (tree *) * (num - 1);
161 use_ops = ggc_alloc (size);
162 use_ops->num_uses = num;
167 /* Return a vector of contiguous memory for NUM v_may_def operands. */
169 static inline v_may_def_optype
170 allocate_v_may_def_optype (unsigned num)
172 v_may_def_optype v_may_def_ops;
174 size = sizeof (struct v_may_def_optype_d)
175 + sizeof (v_may_def_operand_type_t) * (num - 1);
176 v_may_def_ops = ggc_alloc (size);
177 v_may_def_ops->num_v_may_defs = num;
178 return v_may_def_ops;
182 /* Return a vector of contiguous memory for NUM v_use operands. */
184 static inline vuse_optype
185 allocate_vuse_optype (unsigned num)
187 vuse_optype vuse_ops;
189 size = sizeof (struct vuse_optype_d) + sizeof (tree) * (num - 1);
190 vuse_ops = ggc_alloc (size);
191 vuse_ops->num_vuses = num;
196 /* Return a vector of contiguous memory for NUM v_must_def operands. */
198 static inline v_must_def_optype
199 allocate_v_must_def_optype (unsigned num)
201 v_must_def_optype v_must_def_ops;
203 size = sizeof (struct v_must_def_optype_d) + sizeof (tree) * (num - 1);
204 v_must_def_ops = ggc_alloc (size);
205 v_must_def_ops->num_v_must_defs = num;
206 return v_must_def_ops;
210 /* Free memory for USES. */
213 free_uses (use_optype *uses)
223 /* Free memory for DEFS. */
226 free_defs (def_optype *defs)
236 /* Free memory for VUSES. */
239 free_vuses (vuse_optype *vuses)
249 /* Free memory for V_MAY_DEFS. */
252 free_v_may_defs (v_may_def_optype *v_may_defs)
256 ggc_free (*v_may_defs);
262 /* Free memory for V_MUST_DEFS. */
265 free_v_must_defs (v_must_def_optype *v_must_defs)
269 ggc_free (*v_must_defs);
275 /* Initialize the operand cache routines. */
278 init_ssa_operands (void)
280 VARRAY_TREE_PTR_INIT (build_defs, 5, "build defs");
281 VARRAY_TREE_PTR_INIT (build_uses, 10, "build uses");
282 VARRAY_TREE_INIT (build_v_may_defs, 10, "build v_may_defs");
283 VARRAY_TREE_INIT (build_vuses, 10, "build vuses");
284 VARRAY_TREE_INIT (build_v_must_defs, 10, "build v_must_defs");
288 /* Dispose of anything required by the operand routines. */
291 fini_ssa_operands (void)
293 ggc_free (build_defs);
294 ggc_free (build_uses);
295 ggc_free (build_v_may_defs);
296 ggc_free (build_vuses);
297 ggc_free (build_v_must_defs);
300 build_v_may_defs = NULL;
302 build_v_must_defs = NULL;
306 /* All the finalize_ssa_* routines do the work required to turn the build_
307 VARRAY into an operand_vector of the appropriate type. The original vector,
308 if any, is passed in for comparison and virtual SSA_NAME reuse. If the
309 old vector is reused, the pointer passed in is set to NULL so that
310 the memory is not freed when the old operands are freed. */
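/* A hedged outline of the pattern every finalizer below follows (this is a
   sketch, not a function in this file):

	if (old_ops && old_ops->num == num
	    && every old entry matches the build vector)
	  {
	    ops = old_ops;		reuse the old vector
	    *old_ops_p = NULL;		so the caller does not free it
	  }
	else
	  ops = allocate_..._optype (num), filled from the build vector;

	empty the build vector and return ops;  */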
312 /* Return a new def operand vector for STMT, comparing to OLD_OPS_P. */
315 finalize_ssa_defs (def_optype *old_ops_p, tree stmt ATTRIBUTE_UNUSED)
318 def_optype def_ops, old_ops;
321 num = VARRAY_ACTIVE_SIZE (build_defs);
325 #ifdef ENABLE_CHECKING
326 /* There should only be a single real definition per assignment. */
327 if (TREE_CODE (stmt) == MODIFY_EXPR && num > 1)
331 old_ops = *old_ops_p;
333 /* Compare old vector and new array. */
335 if (old_ops && old_ops->num_defs == num)
338 for (x = 0; x < num; x++)
339 if (old_ops->defs[x].def != VARRAY_TREE_PTR (build_defs, x))
353 def_ops = allocate_def_optype (num);
354 for (x = 0; x < num ; x++)
355 def_ops->defs[x].def = VARRAY_TREE_PTR (build_defs, x);
358 VARRAY_POP_ALL (build_defs);
364 /* Return a new use operand vector for STMT, comparing to OLD_OPS_P. */
367 finalize_ssa_uses (use_optype *old_ops_p, tree stmt ATTRIBUTE_UNUSED)
370 use_optype use_ops, old_ops;
373 num = VARRAY_ACTIVE_SIZE (build_uses);
377 #ifdef ENABLE_CHECKING
380 /* If the pointer to the operand is the statement itself, something is
381 wrong. It means that we are pointing to a local variable (the
     initial call to get_stmt_operands does not pass a pointer to a
     local variable).  */
384 for (x = 0; x < num; x++)
385 if (*(VARRAY_TREE_PTR (build_uses, x)) == stmt)
389 old_ops = *old_ops_p;
391 /* Check if the old vector and the new array are the same. */
393 if (old_ops && old_ops->num_uses == num)
396 for (x = 0; x < num; x++)
397 if (old_ops->uses[x].use != VARRAY_TREE_PTR (build_uses, x))
411 use_ops = allocate_use_optype (num);
412 for (x = 0; x < num ; x++)
413 use_ops->uses[x].use = VARRAY_TREE_PTR (build_uses, x);
415 VARRAY_POP_ALL (build_uses);
421 /* Return a new v_may_def operand vector for STMT, comparing to OLD_OPS_P. */
423 static v_may_def_optype
424 finalize_ssa_v_may_defs (v_may_def_optype *old_ops_p)
426 unsigned num, x, i, old_num;
427 v_may_def_optype v_may_def_ops, old_ops;
431 num = VARRAY_ACTIVE_SIZE (build_v_may_defs);
435 old_ops = *old_ops_p;
437 /* Check if the old vector and the new array are the same. */
439 if (old_ops && old_ops->num_v_may_defs == num)
443 for (x = 0; x < num; x++)
445 var = old_ops->v_may_defs[x].def;
446 if (TREE_CODE (var) == SSA_NAME)
447 var = SSA_NAME_VAR (var);
448 if (var != VARRAY_TREE (build_v_may_defs, x))
456 old_num = (old_ops ? old_ops->num_v_may_defs : 0);
460 v_may_def_ops = old_ops;
465 v_may_def_ops = allocate_v_may_def_optype (num);
466 for (x = 0; x < num; x++)
468 var = VARRAY_TREE (build_v_may_defs, x);
469 /* Look for VAR in the old operands vector. */
470 for (i = 0; i < old_num; i++)
472 result = old_ops->v_may_defs[i].def;
473 if (TREE_CODE (result) == SSA_NAME)
474 result = SSA_NAME_VAR (result);
477 v_may_def_ops->v_may_defs[x] = old_ops->v_may_defs[i];
483 v_may_def_ops->v_may_defs[x].def = var;
484 v_may_def_ops->v_may_defs[x].use = var;
489 /* Empty the V_MAY_DEF build vector after VUSES have been processed. */
491 return v_may_def_ops;
495 /* Return a new vuse operand vector, comparing to OLD_OPS_P. */
498 finalize_ssa_vuses (vuse_optype *old_ops_p)
500 unsigned num, x, i, num_v_may_defs, old_num;
501 vuse_optype vuse_ops, old_ops;
504 num = VARRAY_ACTIVE_SIZE (build_vuses);
507 VARRAY_POP_ALL (build_v_may_defs);
511 /* Remove superfluous VUSE operands. If the statement already has a
512 V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
513 needed because V_MAY_DEFs imply a VUSE of the variable. For instance,
     suppose that variable 'a' is aliased:

	      # VUSE <a_2>
	      # a_3 = V_MAY_DEF <a_2>
	      a = a + 1;

     The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
     operation.  */
523 num_v_may_defs = VARRAY_ACTIVE_SIZE (build_v_may_defs);
525 if (num_v_may_defs > 0)
529 for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
531 vuse = VARRAY_TREE (build_vuses, i);
532 for (j = 0; j < num_v_may_defs; j++)
534 if (vuse == VARRAY_TREE (build_v_may_defs, j))
538 /* If we found a useless VUSE operand, remove it from the
539 operand array by replacing it with the last active element
540 in the operand array (unless the useless VUSE was the
	     last operand, in which case we simply remove it).  */
542 if (j != num_v_may_defs)
544 if (i != VARRAY_ACTIVE_SIZE (build_vuses) - 1)
546 VARRAY_TREE (build_vuses, i)
547 = VARRAY_TREE (build_vuses,
548 VARRAY_ACTIVE_SIZE (build_vuses) - 1);
550 VARRAY_POP (build_vuses);
552 /* We want to rescan the element at this index, unless
		 this was the last element, in which case the loop
		 terminates.  */
560 num = VARRAY_ACTIVE_SIZE (build_vuses);
561 /* We could have reduced the size to zero now, however. */
564 VARRAY_POP_ALL (build_v_may_defs);
568 old_ops = *old_ops_p;
  /* Determine whether the vuse build vector is the same as the old vector.  */
572 if (old_ops && old_ops->num_vuses == num)
576 for (x = 0; x < num ; x++)
579 v = old_ops->vuses[x];
580 if (TREE_CODE (v) == SSA_NAME)
581 v = SSA_NAME_VAR (v);
582 if (v != VARRAY_TREE (build_vuses, x))
590 old_num = (old_ops ? old_ops->num_vuses : 0);
599 vuse_ops = allocate_vuse_optype (num);
600 for (x = 0; x < num; x++)
602 tree result, var = VARRAY_TREE (build_vuses, x);
603 /* Look for VAR in the old vector, and use that SSA_NAME. */
604 for (i = 0; i < old_num; i++)
606 result = old_ops->vuses[i];
607 if (TREE_CODE (result) == SSA_NAME)
608 result = SSA_NAME_VAR (result);
611 vuse_ops->vuses[x] = old_ops->vuses[i];
616 vuse_ops->vuses[x] = var;
620 /* The v_may_def build vector wasn't freed because we needed it here.
621 Free it now with the vuses build vector. */
622 VARRAY_POP_ALL (build_vuses);
623 VARRAY_POP_ALL (build_v_may_defs);
628 /* Return a new v_must_def operand vector for STMT, comparing to OLD_OPS_P. */
630 static v_must_def_optype
631 finalize_ssa_v_must_defs (v_must_def_optype *old_ops_p,
632 tree stmt ATTRIBUTE_UNUSED)
634 unsigned num, x, i, old_num = 0;
635 v_must_def_optype v_must_def_ops, old_ops;
638 num = VARRAY_ACTIVE_SIZE (build_v_must_defs);
642 #ifdef ENABLE_CHECKING
643 /* There should only be a single V_MUST_DEF per assignment. */
644 if (TREE_CODE (stmt) == MODIFY_EXPR && num > 1)
648 old_ops = *old_ops_p;
650 /* Check if the old vector and the new array are the same. */
652 if (old_ops && old_ops->num_v_must_defs == num)
656 for (x = 0; x < num; x++)
658 tree var = old_ops->v_must_defs[x];
659 if (TREE_CODE (var) == SSA_NAME)
660 var = SSA_NAME_VAR (var);
661 if (var != VARRAY_TREE (build_v_must_defs, x))
669 old_num = (old_ops ? old_ops->num_v_must_defs : 0);
673 v_must_def_ops = old_ops;
678 v_must_def_ops = allocate_v_must_def_optype (num);
679 for (x = 0; x < num ; x++)
681 tree result, var = VARRAY_TREE (build_v_must_defs, x);
682 /* Look for VAR in the original vector. */
683 for (i = 0; i < old_num; i++)
685 result = old_ops->v_must_defs[i];
686 if (TREE_CODE (result) == SSA_NAME)
687 result = SSA_NAME_VAR (result);
690 v_must_def_ops->v_must_defs[x] = old_ops->v_must_defs[i];
695 v_must_def_ops->v_must_defs[x] = var;
698 VARRAY_POP_ALL (build_v_must_defs);
700 return v_must_def_ops;
/* Finalize all the build vectors, fill the new ones into NEW_OPS.  */
707 finalize_ssa_stmt_operands (tree stmt, stmt_operands_p old_ops,
708 stmt_operands_p new_ops)
710 new_ops->def_ops = finalize_ssa_defs (&(old_ops->def_ops), stmt);
711 new_ops->use_ops = finalize_ssa_uses (&(old_ops->use_ops), stmt);
712 new_ops->v_must_def_ops
713 = finalize_ssa_v_must_defs (&(old_ops->v_must_def_ops), stmt);
714 new_ops->v_may_def_ops = finalize_ssa_v_may_defs (&(old_ops->v_may_def_ops));
715 new_ops->vuse_ops = finalize_ssa_vuses (&(old_ops->vuse_ops));
/* Start the process of building up the operand vectors.  */
722 start_ssa_stmt_operands (void)
724 #ifdef ENABLE_CHECKING
725 if (VARRAY_ACTIVE_SIZE (build_defs) > 0
726 || VARRAY_ACTIVE_SIZE (build_uses) > 0
727 || VARRAY_ACTIVE_SIZE (build_vuses) > 0
728 || VARRAY_ACTIVE_SIZE (build_v_may_defs) > 0
729 || VARRAY_ACTIVE_SIZE (build_v_must_defs) > 0)
735 /* Add DEF_P to the list of pointers to operands. */
738 append_def (tree *def_p)
740 VARRAY_PUSH_TREE_PTR (build_defs, def_p);
744 /* Add USE_P to the list of pointers to operands. */
747 append_use (tree *use_p)
749 VARRAY_PUSH_TREE_PTR (build_uses, use_p);
753 /* Add a new virtual may def for variable VAR to the build array. */
756 append_v_may_def (tree var)
760 /* Don't allow duplicate entries. */
761 for (i = 0; i < VARRAY_ACTIVE_SIZE (build_v_may_defs); i++)
762 if (var == VARRAY_TREE (build_v_may_defs, i))
765 VARRAY_PUSH_TREE (build_v_may_defs, var);
769 /* Add VAR to the list of virtual uses. */
772 append_vuse (tree var)
776 /* Don't allow duplicate entries. */
777 for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
778 if (var == VARRAY_TREE (build_vuses, i))
781 VARRAY_PUSH_TREE (build_vuses, var);
/* Add VAR to the list of virtual must definitions.  */
788 append_v_must_def (tree var)
792 /* Don't allow duplicate entries. */
793 for (i = 0; i < VARRAY_ACTIVE_SIZE (build_v_must_defs); i++)
794 if (var == VARRAY_TREE (build_v_must_defs, i))
797 VARRAY_PUSH_TREE (build_v_must_defs, var);
800 /* Create an operands cache for STMT, returning it in NEW_OPS. OLD_OPS are the
801 original operands, and if ANN is non-null, appropriate stmt flags are set
802 in the stmt's annotation. Note that some fields in old_ops may
803 change to NULL, although none of the memory they originally pointed to
   will be destroyed.  It is appropriate to call free_ssa_operands() on
   the value returned in old_ops.
   The rationale for this: Certain optimizations wish to examine the difference
   between new_ops and old_ops after processing.  If a set of operands doesn't
809 change, new_ops will simply assume the pointer in old_ops, and the old_ops
810 pointer will be set to NULL, indicating no memory needs to be cleared.
811 Usage might appear something like:
813 old_ops_copy = old_ops = stmt_ann(stmt)->operands;
814 build_ssa_operands (stmt, NULL, &old_ops, &new_ops);
815 <* compare old_ops_copy and new_ops *>
816 free_ssa_operands (old_ops); */
819 build_ssa_operands (tree stmt, stmt_ann_t ann, stmt_operands_p old_ops,
820 stmt_operands_p new_ops)
823 tree_ann_t saved_ann = stmt->common.ann;
825 /* Replace stmt's annotation with the one passed in for the duration
826 of the operand building process. This allows "fake" stmts to be built
827 and not be included in other data structures which can be built here. */
828 stmt->common.ann = (tree_ann_t) ann;
830 /* Initially assume that the statement has no volatile operands, nor
831 makes aliased loads or stores. */
834 ann->has_volatile_ops = false;
835 ann->makes_aliased_stores = false;
836 ann->makes_aliased_loads = false;
839 start_ssa_stmt_operands ();
841 code = TREE_CODE (stmt);
845 get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);
846 if (TREE_CODE (TREE_OPERAND (stmt, 0)) == ARRAY_REF
847 || TREE_CODE (TREE_OPERAND (stmt, 0)) == ARRAY_RANGE_REF
848 || TREE_CODE (TREE_OPERAND (stmt, 0)) == COMPONENT_REF
849 || TREE_CODE (TREE_OPERAND (stmt, 0)) == REALPART_EXPR
850 || TREE_CODE (TREE_OPERAND (stmt, 0)) == IMAGPART_EXPR
851 /* Use a V_MAY_DEF if the RHS might throw, as the LHS won't be
852 modified in that case. FIXME we should represent somehow
853 that it is killed on the fallthrough path. */
854 || tree_could_throw_p (TREE_OPERAND (stmt, 1)))
855 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_is_def);
857 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0),
858 opf_is_def | opf_kill_def);
862 get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
866 get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
870 get_asm_expr_operands (stmt);
874 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
878 get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
882 get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
885 /* These nodes contain no variable references. */
887 case CASE_LABEL_EXPR:
889 case TRY_FINALLY_EXPR:
896 /* Notice that if get_expr_operands tries to use &STMT as the operand
897 pointer (which may only happen for USE operands), we will abort in
898 append_use. This default will handle statements like empty
899 statements, or CALL_EXPRs that may appear on the RHS of a statement
900 or as statements themselves. */
901 get_expr_operands (stmt, &stmt, opf_none);
905 finalize_ssa_stmt_operands (stmt, old_ops, new_ops);
906 stmt->common.ann = saved_ann;
910 /* Free any operands vectors in OPS. */
913 free_ssa_operands (stmt_operands_p ops)
916 free_defs (&(ops->def_ops));
918 free_uses (&(ops->use_ops));
920 free_vuses (&(ops->vuse_ops));
921 if (ops->v_may_def_ops)
922 free_v_may_defs (&(ops->v_may_def_ops));
923 if (ops->v_must_def_ops)
924 free_v_must_defs (&(ops->v_must_def_ops));
928 /* Get the operands of statement STMT. Note that repeated calls to
929 get_stmt_operands for the same statement will do nothing until the
930 statement is marked modified by a call to modify_stmt(). */
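/* A hedged usage sketch for a pass that edits a statement in place; new_rhs
   is a placeholder, and modify_stmt is the marking routine referred to above:

	TREE_OPERAND (stmt, 1) = new_rhs;
	modify_stmt (stmt);
	get_stmt_operands (stmt);

   A repeated get_stmt_operands call on the unmodified statement would do
   nothing, as described above.  */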
933 get_stmt_operands (tree stmt)
936 stmt_operands_t old_operands;
938 #if defined ENABLE_CHECKING
939 /* The optimizers cannot handle statements that are nothing but a
940 _DECL. This indicates a bug in the gimplifier. */
941 if (SSA_VAR_P (stmt))
945 /* Ignore error statements. */
946 if (TREE_CODE (stmt) == ERROR_MARK)
949 ann = get_stmt_ann (stmt);
951 /* If the statement has not been modified, the operands are still valid. */
955 timevar_push (TV_TREE_OPS);
957 old_operands = ann->operands;
958 memset (&(ann->operands), 0, sizeof (stmt_operands_t));
960 build_ssa_operands (stmt, ann, &old_operands, &(ann->operands));
961 free_ssa_operands (&old_operands);
963 /* Clear the modified bit for STMT. Subsequent calls to
964 get_stmt_operands for this statement will do nothing until the
965 statement is marked modified by a call to modify_stmt(). */
968 timevar_pop (TV_TREE_OPS);
/* Recursively scan the expression pointed to by EXPR_P in statement STMT.
   FLAGS is one of the OPF_* constants modifying how to interpret the
   operands found.  */
977 get_expr_operands (tree stmt, tree *expr_p, int flags)
983 if (expr == NULL || expr == error_mark_node)
986 code = TREE_CODE (expr);
987 class = TREE_CODE_CLASS (code);
992 /* We could have the address of a component, array member,
993 etc which has interesting variable references. */
994 /* Taking the address of a variable does not represent a
995 reference to it, but the fact that the stmt takes its address will be
996 of interest to some passes (e.g. alias resolution). */
997 add_stmt_operand (expr_p, stmt, 0);
999 /* If the address is invariant, there may be no interesting variable
1000 references inside. */
1001 if (is_gimple_min_invariant (expr))
1004 /* There should be no VUSEs created, since the referenced objects are
1005 not really accessed. The only operands that we should find here
1006 are ARRAY_REF indices which will always be real operands (GIMPLE
1007 does not allow non-registers as array indices). */
1008 flags |= opf_no_vops;
1010 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1017 /* If we found a variable, add it to DEFS or USES depending
1018 on the operand flags. */
1019 add_stmt_operand (expr_p, stmt, flags);
1023 get_indirect_ref_operands (stmt, expr, flags);
1027 case ARRAY_RANGE_REF:
1028 /* Treat array references as references to the virtual variable
1029 representing the array. The virtual variable for an ARRAY_REF
1030 is the VAR_DECL for the array. */
1032 /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
	 according to the value of FLAGS.  Recurse if the LHS of the
1034 ARRAY_REF node is not a regular variable. */
1035 if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
1036 add_stmt_operand (expr_p, stmt, flags);
1038 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1040 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1041 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1042 get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
1048 /* Similarly to arrays, references to compound variables (complex
1049 types and structures/unions) are globbed.
	 FIXME: This means that

		a.x = 6;
		a.y = 7;
		foo (a.x, a.y);

	 will not be constant propagated because the two partial
	 definitions to 'a' will kill each other.  Note that SRA may be
	 able to fix this problem if 'a' can be scalarized.  */
1061 /* If the LHS of the compound reference is not a regular variable,
1062 recurse to keep looking for more operands in the subexpression. */
1063 if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
1064 add_stmt_operand (expr_p, stmt, flags);
1066 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1068 if (code == COMPONENT_REF)
1069 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1072 case WITH_SIZE_EXPR:
1073 /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
1074 and an rvalue reference to its second argument. */
1075 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1076 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1080 get_call_expr_operands (stmt, expr);
1084 get_expr_operands (stmt, &COND_EXPR_COND (expr), opf_none);
1085 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1086 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1094 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1096 op = TREE_OPERAND (expr, 0);
1097 if (TREE_CODE (op) == WITH_SIZE_EXPR)
	  op = TREE_OPERAND (op, 0);
1099 if (TREE_CODE (op) == ARRAY_REF
1100 || TREE_CODE (op) == ARRAY_RANGE_REF
1101 || TREE_CODE (op) == COMPONENT_REF
1102 || TREE_CODE (op) == REALPART_EXPR
1103 || TREE_CODE (op) == IMAGPART_EXPR)
1104 subflags = opf_is_def;
1106 subflags = opf_is_def | opf_kill_def;
1108 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags);
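	/* A hedged illustration of the distinction above: for a store to a
	   non-register variable, 'a = b_2' reaches here with
	   opf_is_def | opf_kill_def and produces a V_MUST_DEF of 'a',
	   whereas 'a.x = b_2' keeps only opf_is_def and produces a
	   V_MAY_DEF, since the store does not kill all of 'a'.  */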
1114 /* General aggregate CONSTRUCTORs have been decomposed, but they
1115 are still in use as the COMPLEX_EXPR equivalent for vectors. */
1118 for (t = TREE_OPERAND (expr, 0); t ; t = TREE_CHAIN (t))
1119 get_expr_operands (stmt, &TREE_VALUE (t), opf_none);
1124 case TRUTH_NOT_EXPR:
1126 case VIEW_CONVERT_EXPR:
1128 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1131 case TRUTH_AND_EXPR:
1133 case TRUTH_XOR_EXPR:
1138 tree op0 = TREE_OPERAND (expr, 0);
1139 tree op1 = TREE_OPERAND (expr, 1);
1141 /* If it would be profitable to swap the operands, then do so to
1142 canonicalize the statement, enabling better optimization.
1144 By placing canonicalization of such expressions here we
1145 transparently keep statements in canonical form, even
1146 when the statement is modified. */
1147 if (tree_swap_operands_p (op0, op1, false))
1149 /* For relationals we need to swap the operands
1150 and change the code. */
1156 TREE_SET_CODE (expr, swap_tree_comparison (code));
1157 TREE_OPERAND (expr, 0) = op1;
1158 TREE_OPERAND (expr, 1) = op0;
1161 /* For a commutative operator we can just swap the operands. */
1162 else if (commutative_tree_code (code))
1164 TREE_OPERAND (expr, 0) = op1;
1165 TREE_OPERAND (expr, 1) = op0;
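	/* A hedged example of the canonicalization above: if
	   tree_swap_operands_p prefers the operands the other way around,
	   a comparison such as '3 < x_2' is rewritten to 'x_2 > 3' using
	   swap_tree_comparison, and a commutative expression such as
	   '3 + x_2' simply becomes 'x_2 + 3'.  */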
1169 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1170 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1179 /* Expressions that make no memory references. */
1185 if (class == '2' || class == '<')
1187 if (class == 'c' || class == 't')
1191 /* If we get here, something has gone wrong. */
1192 fprintf (stderr, "unhandled expression in get_expr_operands():\n");
1194 fputs ("\n", stderr);
/* Scan operands in the ASM_EXPR statement STMT.  */
1202 get_asm_expr_operands (tree stmt)
1204 stmt_ann_t s_ann = stmt_ann (stmt);
1205 int noutputs = list_length (ASM_OUTPUTS (stmt));
1206 const char **oconstraints
1207 = (const char **) alloca ((noutputs) * sizeof (const char *));
1210 const char *constraint;
1211 bool allows_mem, allows_reg, is_inout;
  for (i = 0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
1215 oconstraints[i] = constraint
1216 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1217 parse_output_constraint (&constraint, i, 0, 0,
1218 &allows_mem, &allows_reg, &is_inout);
1220 #if defined ENABLE_CHECKING
1221 /* This should have been split in gimplify_asm_expr. */
1222 if (allows_reg && is_inout)
1226 /* Memory operands are addressable. Note that STMT needs the
1227 address of this operand. */
1228 if (!allows_reg && allows_mem)
1230 tree t = get_base_address (TREE_VALUE (link));
1231 if (t && DECL_P (t))
1232 note_addressable (t, s_ann);
1235 get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
1238 for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
1241 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1242 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1243 oconstraints, &allows_mem, &allows_reg);
1245 /* Memory operands are addressable. Note that STMT needs the
1246 address of this operand. */
1247 if (!allows_reg && allows_mem)
1249 tree t = get_base_address (TREE_VALUE (link));
1250 if (t && DECL_P (t))
1251 note_addressable (t, s_ann);
1254 get_expr_operands (stmt, &TREE_VALUE (link), 0);
1258 /* Clobber memory for asm ("" : : : "memory"); */
1259 for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
1260 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
1264 /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
1265 decided to group them). */
1267 add_stmt_operand (&global_var, stmt, opf_is_def);
1269 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
1271 tree var = referenced_var (i);
1272 add_stmt_operand (&var, stmt, opf_is_def);
1275 /* Now clobber all addressables. */
1276 EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i,
1278 tree var = referenced_var (i);
1279 add_stmt_operand (&var, stmt, opf_is_def);
1286 /* A subroutine of get_expr_operands to handle INDIRECT_REF. */
1289 get_indirect_ref_operands (tree stmt, tree expr, int flags)
1291 tree *pptr = &TREE_OPERAND (expr, 0);
1293 stmt_ann_t ann = stmt_ann (stmt);
1295 /* Stores into INDIRECT_REF operands are never killing definitions. */
1296 flags &= ~opf_kill_def;
1298 if (REF_ORIGINAL (expr))
1300 enum tree_code ocode = TREE_CODE (REF_ORIGINAL (expr));
      /* If we originally accessed part of a structure, we still do.  */
1303 if (ocode == ARRAY_REF
1304 || ocode == COMPONENT_REF
1305 || ocode == REALPART_EXPR
1306 || ocode == IMAGPART_EXPR)
1307 flags &= ~opf_kill_def;
1310 if (SSA_VAR_P (ptr))
1312 struct ptr_info_def *pi = NULL;
1314 /* If PTR has flow-sensitive points-to information, use it. */
1315 if (TREE_CODE (ptr) == SSA_NAME
1316 && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
1317 && pi->name_mem_tag)
1319 /* PTR has its own memory tag. Use it. */
1320 add_stmt_operand (&pi->name_mem_tag, stmt, flags);
1324 /* If PTR is not an SSA_NAME or it doesn't have a name
1325 tag, use its type memory tag. */
1328 /* If we are emitting debugging dumps, display a warning if
1329 PTR is an SSA_NAME with no flow-sensitive alias
	     information.  That means that we may need to compute
	     aliasing information again.  */
1333 && TREE_CODE (ptr) == SSA_NAME
1337 "NOTE: no flow-sensitive alias info for ");
1338 print_generic_expr (dump_file, ptr, dump_flags);
1339 fprintf (dump_file, " in ");
1340 print_generic_stmt (dump_file, stmt, dump_flags);
1343 if (TREE_CODE (ptr) == SSA_NAME)
1344 ptr = SSA_NAME_VAR (ptr);
1345 ann = var_ann (ptr);
1346 if (ann->type_mem_tag)
1347 add_stmt_operand (&ann->type_mem_tag, stmt, flags);
1351 /* If a constant is used as a pointer, we can't generate a real
1352 operand for it but we mark the statement volatile to prevent
1353 optimizations from messing things up. */
1354 else if (TREE_CODE (ptr) == INTEGER_CST)
1357 ann->has_volatile_ops = true;
1361 /* Everything else *should* have been folded elsewhere, but users
     are smarter than we are at finding ways to write invalid code.  We
1363 cannot just abort here. If we were absolutely certain that we
1364 do handle all valid cases, then we could just do nothing here.
1365 That seems optimistic, so attempt to do something logical... */
1366 else if ((TREE_CODE (ptr) == PLUS_EXPR || TREE_CODE (ptr) == MINUS_EXPR)
1367 && TREE_CODE (TREE_OPERAND (ptr, 0)) == ADDR_EXPR
1368 && TREE_CODE (TREE_OPERAND (ptr, 1)) == INTEGER_CST)
1370 /* Make sure we know the object is addressable. */
1371 pptr = &TREE_OPERAND (ptr, 0);
1372 add_stmt_operand (pptr, stmt, 0);
1374 /* Mark the object itself with a VUSE. */
1375 pptr = &TREE_OPERAND (*pptr, 0);
1376 get_expr_operands (stmt, pptr, flags);
  /* OK, this isn't even is_gimple_min_invariant.  Something's broken.  */
1384 /* Add a USE operand for the base pointer. */
1385 get_expr_operands (stmt, pptr, opf_none);
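/* A hedged illustration of the tag handling above (the tag names and SSA
   versions are made up): for a load 'x_4 = *p_1', if p_1 carries
   flow-sensitive points-to information the statement gets virtual operands
   for p_1's name tag,

	# VUSE <NMT.7_9>
	x_4 = *p_1;

   and otherwise it falls back to the type tag associated with 'p'.  In
   both cases p_1 itself is also added as a real USE operand.  */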
1388 /* A subroutine of get_expr_operands to handle CALL_EXPR. */
1391 get_call_expr_operands (tree stmt, tree expr)
1394 int call_flags = call_expr_flags (expr);
1396 /* Find uses in the called function. */
1397 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1399 for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
1400 get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
1402 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1404 if (bitmap_first_set_bit (call_clobbered_vars) >= 0)
      /* A 'pure' or 'const' function never clobbers anything.
1407 A 'noreturn' function might, but since we don't return anyway
1408 there is no point in recording that. */
1409 if (TREE_SIDE_EFFECTS (expr)
1410 && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
1411 add_call_clobber_ops (stmt);
1412 else if (!(call_flags & ECF_CONST))
1413 add_call_read_ops (stmt);
/* Add *VAR_P to the appropriate operand array for statement STMT.  FLAGS
   is as in get_expr_operands.  If *VAR_P is a GIMPLE register, it will be
   added to the statement's real operands, otherwise it is added to the
   virtual operands.  */
1424 add_stmt_operand (tree *var_p, tree stmt, int flags)
1428 stmt_ann_t s_ann = stmt_ann (stmt);
1434 /* If the operand is an ADDR_EXPR, add its operand to the list of
1435 variables that have had their address taken in this statement. */
1436 if (TREE_CODE (var) == ADDR_EXPR)
1438 note_addressable (TREE_OPERAND (var, 0), s_ann);
1442 /* If the original variable is not a scalar, it will be added to the list
1443 of virtual operands. In that case, use its base symbol as the virtual
1444 variable representing it. */
1445 is_real_op = is_gimple_reg (var);
1446 if (!is_real_op && !DECL_P (var))
1447 var = get_virtual_var (var);
1449 /* If VAR is not a variable that we care to optimize, do nothing. */
1450 if (var == NULL_TREE || !SSA_VAR_P (var))
1453 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1454 v_ann = var_ann (sym);
1456 /* Don't expose volatile variables to the optimizers. */
1457 if (TREE_THIS_VOLATILE (sym))
1460 s_ann->has_volatile_ops = true;
1466 /* The variable is a GIMPLE register. Add it to real operands. */
1467 if (flags & opf_is_def)
1474 varray_type aliases;
1476 /* The variable is not a GIMPLE register. Add it (or its aliases) to
1477 virtual operands, unless the caller has specifically requested
1478 not to add virtual operands (used when adding operands inside an
1479 ADDR_EXPR expression). */
1480 if (flags & opf_no_vops)
1483 aliases = v_ann->may_aliases;
1485 if (aliases == NULL)
1487 /* The variable is not aliased or it is an alias tag. */
1488 if (flags & opf_is_def)
1490 if (v_ann->is_alias_tag)
1492 /* Alias tagged vars get V_MAY_DEF to avoid breaking
		 def-def chains with the other variables in their
		 alias sets.  */
1496 s_ann->makes_aliased_stores = 1;
1497 append_v_may_def (var);
1499 else if (flags & opf_kill_def)
1501 #if defined ENABLE_CHECKING
	      /* Only regular variables may get a V_MUST_DEF
		 operand.  */
1504 if (v_ann->mem_tag_kind != NOT_A_TAG)
1507 /* V_MUST_DEF for non-aliased, non-GIMPLE register
1508 variable definitions. */
1509 append_v_must_def (var);
	      /* Add a V_MAY_DEF for call-clobbered variables and
		 memory tags.  */
1515 append_v_may_def (var);
1521 if (s_ann && v_ann->is_alias_tag)
1522 s_ann->makes_aliased_loads = 1;
      /* The variable is aliased.  Add its aliases to the virtual
	 operands.  */
1531 #if defined ENABLE_CHECKING
1532 if (VARRAY_ACTIVE_SIZE (aliases) == 0)
1536 if (flags & opf_is_def)
1538 /* If the variable is also an alias tag, add a virtual
1539 operand for it, otherwise we will miss representing
1540 references to the members of the variable's alias set.
1541 This fixes the bug in gcc.c-torture/execute/20020503-1.c. */
1542 if (v_ann->is_alias_tag)
1543 append_v_may_def (var);
1545 for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
1546 append_v_may_def (VARRAY_TREE (aliases, i));
1549 s_ann->makes_aliased_stores = 1;
	  /* Similarly, append a virtual use for VAR itself, when
1554 it is an alias tag. */
1555 if (v_ann->is_alias_tag)
1558 for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
1559 append_vuse (VARRAY_TREE (aliases, i));
1562 s_ann->makes_aliased_loads = 1;
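/* A hedged illustration of the aliased case above (alias names and SSA
   versions are made up): if 'a' has may_aliases = { MT.3 }, a store through
   'a' gets a V_MAY_DEF for each alias rather than a killing definition:

	# MT.3_7 = V_MAY_DEF <MT.3_6>
	a = x_2;

   and a load from 'a' correspondingly gets a VUSE of each alias.  */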
/* Record that VAR had its address taken in the statement with annotation
   S_ANN.  */
1573 note_addressable (tree var, stmt_ann_t s_ann)
1578 var = get_base_address (var);
1579 if (var && SSA_VAR_P (var))
1581 if (s_ann->addresses_taken == NULL)
1582 s_ann->addresses_taken = BITMAP_GGC_ALLOC ();
1583 bitmap_set_bit (s_ann->addresses_taken, var_ann (var)->uid);
1588 /* Add clobbering definitions for .GLOBAL_VAR or for each of the call
1589 clobbered variables in the function. */
1592 add_call_clobber_ops (tree stmt)
  /* Functions that are not const, pure, or noreturn may clobber
     call-clobbered variables.  */
1596 if (stmt_ann (stmt))
1597 stmt_ann (stmt)->makes_clobbering_call = true;
1599 /* If we had created .GLOBAL_VAR earlier, use it. Otherwise, add
1600 a V_MAY_DEF operand for every call clobbered variable. See
1601 compute_may_aliases for the heuristic used to decide whether
1602 to create .GLOBAL_VAR or not. */
1604 add_stmt_operand (&global_var, stmt, opf_is_def);
1609 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
1611 tree var = referenced_var (i);
      /* If VAR is read-only, don't add a V_MAY_DEF, just a
	 VUSE.  */
1615 if (!TREE_READONLY (var))
1616 add_stmt_operand (&var, stmt, opf_is_def);
1618 add_stmt_operand (&var, stmt, opf_none);
/* Add VUSE operands for .GLOBAL_VAR or all call-clobbered variables in the
   function.  */
1628 add_call_read_ops (tree stmt)
  /* If the function is not pure, it may reference memory.  Add
1631 a VUSE for .GLOBAL_VAR if it has been created. Otherwise, add a VUSE
1632 for each call-clobbered variable. See add_referenced_var for the
1633 heuristic used to decide whether to create .GLOBAL_VAR. */
1635 add_stmt_operand (&global_var, stmt, opf_none);
1640 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
1642 tree var = referenced_var (i);
1643 add_stmt_operand (&var, stmt, opf_none);
1648 /* Copies virtual operands from SRC to DST. */
1651 copy_virtual_operands (tree dst, tree src)
1654 vuse_optype vuses = STMT_VUSE_OPS (src);
1655 v_may_def_optype v_may_defs = STMT_V_MAY_DEF_OPS (src);
1656 v_must_def_optype v_must_defs = STMT_V_MUST_DEF_OPS (src);
1657 vuse_optype *vuses_new = &stmt_ann (dst)->operands.vuse_ops;
1658 v_may_def_optype *v_may_defs_new = &stmt_ann (dst)->operands.v_may_def_ops;
1659 v_must_def_optype *v_must_defs_new = &stmt_ann (dst)->operands.v_must_def_ops;
1663 *vuses_new = allocate_vuse_optype (NUM_VUSES (vuses));
1664 for (i = 0; i < NUM_VUSES (vuses); i++)
1665 SET_VUSE_OP (*vuses_new, i, VUSE_OP (vuses, i));
1670 *v_may_defs_new = allocate_v_may_def_optype (NUM_V_MAY_DEFS (v_may_defs));
1671 for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
1673 SET_V_MAY_DEF_OP (*v_may_defs_new, i, V_MAY_DEF_OP (v_may_defs, i));
1674 SET_V_MAY_DEF_RESULT (*v_may_defs_new, i,
1675 V_MAY_DEF_RESULT (v_may_defs, i));
1681 *v_must_defs_new = allocate_v_must_def_optype (NUM_V_MUST_DEFS (v_must_defs));
1682 for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
1683 SET_V_MUST_DEF_OP (*v_must_defs_new, i, V_MUST_DEF_OP (v_must_defs, i));
/* Specifically for use in DOM's expression analysis.  Given a store, we
   create an artificial stmt which looks like a load from the store; this
   can be used to eliminate redundant loads.  OLD_OPS are the operands
   from the store stmt, and NEW_STMT is the new load which represents a
   load of the values stored.  */
1695 create_ssa_artficial_load_stmt (stmt_operands_p old_ops, tree new_stmt)
1699 stmt_operands_t tmp;
1702 memset (&tmp, 0, sizeof (stmt_operands_t));
1703 ann = get_stmt_ann (new_stmt);
  /* Free operands just in case it was an existing stmt.  */
1706 free_ssa_operands (&(ann->operands));
1708 build_ssa_operands (new_stmt, NULL, &tmp, &(ann->operands));
1709 free_vuses (&(ann->operands.vuse_ops));
1710 free_v_may_defs (&(ann->operands.v_may_def_ops));
1711 free_v_must_defs (&(ann->operands.v_must_def_ops));
1713 /* For each VDEF on the original statement, we want to create a
     VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
     statement.  */
1716 for (j = 0; j < NUM_V_MAY_DEFS (old_ops->v_may_def_ops); j++)
1718 op = V_MAY_DEF_RESULT (old_ops->v_may_def_ops, j);
1722 for (j = 0; j < NUM_V_MUST_DEFS (old_ops->v_must_def_ops); j++)
1724 op = V_MUST_DEF_OP (old_ops->v_must_def_ops, j);
1728 /* Now set the vuses for this new stmt. */
1729 ann->operands.vuse_ops = finalize_ssa_vuses (&(tmp.vuse_ops));
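/* A hedged illustration of what the routine above produces (the SSA
   versions are made up): given the store

	# a_6 = V_MAY_DEF <a_5>
	*p_1 = x_3;

   the artificial load statement built for DOM is given a VUSE of each
   V_MAY_DEF result, i.e. VUSE <a_6>, so that a later load such as
   'y_8 = *p_1' can be recognized as redundant with the stored value.  */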
1732 #include "gt-tree-ssa-operands.h"