1 /* SCC value numbering for trees
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dan@dberlin.org>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to
20 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
21 Boston, MA 02110-1301, USA. */
25 #include "coretypes.h"
29 #include "basic-block.h"
30 #include "diagnostic.h"
31 #include "tree-inline.h"
32 #include "tree-flow.h"
33 #include "tree-gimple.h"
34 #include "tree-dump.h"
38 #include "tree-iterator.h"
40 #include "alloc-pool.h"
41 #include "tree-pass.h"
44 #include "langhooks.h"
46 #include "tree-ssa-propagate.h"
47 #include "tree-ssa-sccvn.h"
49 /* This algorithm is based on the SCC algorithm presented by Keith
50 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
51 (http://citeseer.ist.psu.edu/41805.html). In
52 straight line code, it is equivalent to a regular hash based value
53 numbering that is performed in reverse postorder.
55 For code with cycles, there are two alternatives, both of which
56 require keeping the hashtables separate from the actual list of
57 value numbers for SSA names.
59 1. Iterate value numbering in an RPO walk of the blocks, removing
60 all the entries from the hashtable after each iteration (but
61 keeping the SSA name->value number mapping between iterations).
62 Iterate until it does not change.
64 2. Perform value numbering as part of an SCC walk on the SSA graph,
65 iterating only the cycles in the SSA graph until they do not change
66 (using a separate, optimistic hashtable for value numbering the SCC
69 The second is not just faster in practice (because most SSA graph
70 cycles do not involve all the variables in the graph), it also has
73 One of these nice properties is that when we pop an SCC off the
74 stack, we are guaranteed to have processed all the operands coming from
75 *outside of that SCC*, so we do not need to do anything special to
76 ensure they have value numbers.
78 Another nice property is that the SCC walk is done as part of a DFS
79 of the SSA graph, which makes it easy to perform combining and
80 simplifying operations at the same time.
82 The code below is deliberately written in a way that makes it easy
83 to separate the SCC walk from the other work it does.
85 In order to propagate constants through the code, we track which
86 expressions contain constants, and use those while folding. In
87 theory, we could also track expressions whose value numbers are
88 replaced, in case we end up folding based on expression
91 In order to value number memory, we assign value numbers to vuses.
92 This enables us to note that, for example, stores to the same
93 address of the same value from the same starting memory states are
97 1. We can iterate only the changing portions of the SCC's, but
98 I have not seen an SCC big enough for this to be a win.
99 2. If you differentiate between phi nodes for loops and phi nodes
100 for if-then-else, you can properly consider phi nodes in different
101 blocks for equivalence.
102 3. We could value number vuses in more cases, particularly, whole
106 /* The set of hashtables and alloc_pool's for their items. */
/* NOTE(review): this extract omits interior lines -- the htab_t members of
   vn_tables_s and most struct fields (type, op0/op1, result, hashcode) are
   not visible here.  Confirm against the full file before editing.  */
108 typedef struct vn_tables_s
114 alloc_pool unary_op_pool;
115 alloc_pool binary_op_pool;
116 alloc_pool phis_pool;
117 alloc_pool references_pool;
120 /* Binary operations in the hashtable consist of two operands, an
121 opcode, and a type. Result is the value number of the operation,
122 and hashcode is stored to avoid having to calculate it
125 typedef struct vn_binary_op_s
127 enum tree_code opcode;
134 typedef const struct vn_binary_op_s *const_vn_binary_op_t;
136 /* Unary operations in the hashtable consist of a single operand, an
137 opcode, and a type. Result is the value number of the operation,
138 and hashcode is stored to avoid having to calculate it repeatedly. */
140 typedef struct vn_unary_op_s
142 enum tree_code opcode;
148 typedef const struct vn_unary_op_s *const_vn_unary_op_t;
150 /* Phi nodes in the hashtable consist of their non-VN_TOP phi
151 arguments, and the basic block the phi is in. Result is the value
152 number of the operation, and hashcode is stored to avoid having to
153 calculate it repeatedly. Phi nodes not in the same block are never
154 considered equivalent. */
156 typedef struct vn_phi_s
158 VEC (tree, heap) *phiargs;
163 typedef const struct vn_phi_s *const_vn_phi_t;
165 /* Reference operands only exist in reference operations structures.
166 They consist of an opcode, type, and some number of operands. For
167 a given opcode, some, all, or none of the operands may be used.
168 The operands are there to store the information that makes up the
169 portion of the addressing calculation that opcode performs. */
171 typedef struct vn_reference_op_struct
173 enum tree_code opcode;
/* DEF_VEC_O/DEF_VEC_ALLOC_O instantiate a vector of vn_reference_op_s
   stored by value (object vectors) on the heap.  */
179 typedef vn_reference_op_s *vn_reference_op_t;
180 typedef const vn_reference_op_s *const_vn_reference_op_t;
182 DEF_VEC_O(vn_reference_op_s);
183 DEF_VEC_ALLOC_O(vn_reference_op_s, heap);
185 /* A reference operation in the hashtable is representation as a
186 collection of vuses, representing the memory state at the time of
187 the operation, and a collection of operands that make up the
188 addressing calculation. If two vn_reference_t's have the same set
189 of operands, they access the same memory location. We also store
190 the resulting value number, and the hashcode. The vuses are
191 always stored in order sorted by ssa name version. */
193 typedef struct vn_reference_s
195 VEC (tree, gc) *vuses;
196 VEC (vn_reference_op_s, heap) *operands;
200 typedef const struct vn_reference_s *const_vn_reference_t;
/* File-scope value-numbering state.  valid_info holds proven facts,
   optimistic_info holds in-flight assumptions while iterating an SCC,
   and current_info points at whichever of the two is active.  */
202 /* Valid hashtables storing information we have proven to be
205 static vn_tables_t valid_info;
207 /* Optimistic hashtables storing information we are making assumptions about
208 during iterations. */
210 static vn_tables_t optimistic_info;
212 /* PRE hashtables storing information about mapping from expressions to
215 static vn_tables_t pre_info;
217 /* Pointer to the set of hashtables that is currently being used.
218 Should always point to either the optimistic_info, or the
221 static vn_tables_t current_info;
224 /* Reverse post order index for each basic block. */
226 static int *rpo_numbers;
/* Shorthand for the current value number of SSA name X.  */
228 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
230 /* This represents the top of the VN lattice, which is the universal
235 /* Next DFS number and the stack for strongly connected component
238 static unsigned int next_dfs_num;
239 static VEC (tree, heap) *sccstack;
241 DEF_VEC_P(vn_ssa_aux_t);
242 DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);
/* Indexed by SSA_NAME_VERSION; grown lazily by VN_INFO_GET below.  */
244 /* Table of vn_ssa_aux_t's, one per ssa_name. */
246 static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
/* NOTE(review): the function headers for VN_INFO and parts of these
   accessors are missing from this extract; bodies shown are partial.  */
248 /* Return the value numbering information for a given SSA name. */
253 return VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
254 SSA_NAME_VERSION (name));
257 /* Set the value numbering info for a given SSA name to a given
261 VN_INFO_SET (tree name, vn_ssa_aux_t value)
263 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
264 SSA_NAME_VERSION (name), value);
267 /* Get the value numbering info for a given SSA name, creating it if
268 it does not exist. */
/* Allocates a zeroed vn_ssa_aux, grows the table so the SSA version
   index is in range, and installs the new record at that slot.  */
271 VN_INFO_GET (tree name)
273 vn_ssa_aux_t newinfo = XCNEW (struct vn_ssa_aux);
274 if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
275 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
276 SSA_NAME_VERSION (name) + 1);
277 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
278 SSA_NAME_VERSION (name), newinfo);
283 /* Compare two reference operands P1 and P2 for equality. return true if
284 they are equal, and false otherwise. */
287 vn_reference_op_eq (const void *p1, const void *p2)
289 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
290 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
/* Equality requires identical opcode and type plus pairwise-equal
   op0/op1/op2 (expressions_equal_p treats two NULLs as equal).  */
291 return vro1->opcode == vro2->opcode
292 && vro1->type == vro2->type
293 && expressions_equal_p (vro1->op0, vro2->op0)
294 && expressions_equal_p (vro1->op1, vro2->op1)
295 && expressions_equal_p (vro1->op2, vro2->op2);
298 /* Compute the hash for a reference operand VRO1 */
/* The opcode is used as the initial hash seed for each operand so
   operand position and opcode both influence the result.  */
301 vn_reference_op_compute_hash (const vn_reference_op_t vro1)
303 return iterative_hash_expr (vro1->op0, vro1->opcode)
304 + iterative_hash_expr (vro1->op1, vro1->opcode)
305 + iterative_hash_expr (vro1->op2, vro1->opcode);
308 /* Return the hashcode for a given reference operation P1. */
/* Returns the cached hash; the value is computed once by
   vn_reference_compute_hash and stored in the structure.  */
311 vn_reference_hash (const void *p1)
313 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
314 return vr1->hashcode;
317 /* Compute a hash for the reference operation VR1 and return it. */
319 static inline hashval_t
320 vn_reference_compute_hash (const vn_reference_t vr1)
322 hashval_t result = 0;
325 vn_reference_op_t vro;
/* Fold in every vuse, then every reference operand.  */
327 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
328 result += iterative_hash_expr (v, 0);
329 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
330 result += vn_reference_op_compute_hash (vro);
335 /* Return true if reference operations P1 and P2 are equivalent. This
336 means they have the same set of operands and vuses. */
339 vn_reference_eq (const void *p1, const void *p2)
343 vn_reference_op_t vro;
345 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
346 const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
/* Fast path: identical vectors imply identical contents.  */
348 if (vr1->vuses == vr2->vuses
349 && vr1->operands == vr2->operands)
352 /* Impossible for them to be equivalent if they have different
354 if (VEC_length (tree, vr1->vuses) != VEC_length (tree, vr2->vuses))
357 /* We require that address operands be canonicalized in a way that
358 two memory references will have the same operands if they are
360 if (VEC_length (vn_reference_op_s, vr1->operands)
361 != VEC_length (vn_reference_op_s, vr2->operands))
364 /* The memory state is more often different than the address of the
365 store/load, so check it first. */
/* Vuses are kept sorted by SSA version, so positional comparison
   suffices here.  */
366 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
368 if (VEC_index (tree, vr2->vuses, i) != v)
372 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
374 if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
381 /* Place the vuses from STMT into *result */
384 vuses_to_vec (tree stmt, VEC (tree, gc) **result)
/* Collect every virtual use operand, then sort (the hashtable relies
   on vuses being ordered by SSA name version).  */
392 FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VIRTUAL_USES)
393 VEC_safe_push (tree, gc, *result, vuse);
395 if (VEC_length (tree, *result) > 1)
396 sort_vuses (*result);
400 /* Copy the VUSE names in STMT into a vector, and return
/* Returns a freshly allocated (GC) vector; caller owns nothing to free.  */
404 copy_vuses_from_stmt (tree stmt)
406 VEC (tree, gc) *vuses = NULL;
408 vuses_to_vec (stmt, &vuses);
413 /* Place the vdefs from STMT into *result */
/* Mirrors vuses_to_vec but walks virtual defs; reuses sort_vuses since
   the ordering criterion (SSA version) is the same.  */
416 vdefs_to_vec (tree stmt, VEC (tree, gc) **result)
424 FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, iter, SSA_OP_VIRTUAL_DEFS)
425 VEC_safe_push (tree, gc, *result, vdef);
427 if (VEC_length (tree, *result) > 1)
428 sort_vuses (*result);
431 /* Copy the names of vdef results in STMT into a vector, and return
434 static VEC (tree, gc) *
435 copy_vdefs_from_stmt (tree stmt)
437 VEC (tree, gc) *vdefs = NULL;
439 vdefs_to_vec (stmt, &vdefs);
444 /* Place for shared_v{uses/defs}_from_stmt to shove vuses/vdefs. */
445 static VEC (tree, gc) *shared_lookup_vops;
447 /* Copy the virtual uses from STMT into SHARED_LOOKUP_VOPS.
448 This function will overwrite the current SHARED_LOOKUP_VOPS
/* Shared-buffer variant: truncates and refills the single static
   vector, so the returned vector is only valid until the next call.  */
452 shared_vuses_from_stmt (tree stmt)
454 VEC_truncate (tree, shared_lookup_vops, 0);
455 vuses_to_vec (stmt, &shared_lookup_vops);
457 return shared_lookup_vops;
460 /* Copy the operations present in load/store/call REF into RESULT, a vector of
461 vn_reference_op_s's. */
/* NOTE(review): several lines of this function (the loop header, switch
   statement, and some case labels) are missing from this extract.  */
464 copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
466 /* Calls are different from all other reference operations. */
467 if (TREE_CODE (ref) == CALL_EXPR)
469 vn_reference_op_s temp;
471 call_expr_arg_iterator iter;
474 /* Copy the call_expr opcode, type, function being called, and
/* First op records the CALL_EXPR itself.  */
476 memset (&temp, 0, sizeof (temp));
477 temp.type = TREE_TYPE (ref);
478 temp.opcode = CALL_EXPR;
479 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
/* Second op records the callee; prefer the fndecl when known,
   presumably falling back to CALL_EXPR_FN for indirect calls
   (the intervening NULL check is not visible here).  */
481 callfn = get_callee_fndecl (ref);
483 callfn = CALL_EXPR_FN (ref);
484 temp.type = TREE_TYPE (callfn);
485 temp.opcode = TREE_CODE (callfn);
487 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
/* Then one op per call argument.  */
489 FOR_EACH_CALL_EXPR_ARG (callarg, iter, ref)
491 memset (&temp, 0, sizeof (temp));
492 temp.type = TREE_TYPE (callarg);
493 temp.opcode = TREE_CODE (callarg);
495 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
500 /* For non-calls, store the information that makes up the address. */
504 vn_reference_op_s temp;
506 memset (&temp, 0, sizeof (temp));
507 temp.type = TREE_TYPE (ref);
508 temp.opcode = TREE_CODE (ref);
512 case ALIGN_INDIRECT_REF:
513 case MISALIGNED_INDIRECT_REF:
515 /* The only operand is the address, which gets its own
516 vn_reference_op_s structure. */
519 /* Record bits and position. */
520 temp.op0 = TREE_OPERAND (ref, 1);
521 temp.op1 = TREE_OPERAND (ref, 2);
524 /* Record field as operand. */
525 temp.op0 = TREE_OPERAND (ref, 1);
527 case ARRAY_RANGE_REF:
529 /* Record index as operand. */
530 temp.op0 = TREE_OPERAND (ref, 1);
531 temp.op1 = TREE_OPERAND (ref, 3);
546 /* These are only interesting for their operands, their
547 existence, and their type. They will never be the last
548 ref in the chain of references (IE they require an
549 operand), so we don't have to put anything
550 for op* as it will be handled by the iteration */
553 case VIEW_CONVERT_EXPR:
560 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
/* Walk down the reference chain toward the base object.  */
562 if (REFERENCE_CLASS_P (ref) || TREE_CODE (ref) == ADDR_EXPR)
563 ref = TREE_OPERAND (ref, 0);
569 /* Create a vector of vn_reference_op_s structures from REF, a
570 REFERENCE_CLASS_P tree. The vector is not shared. */
/* Fresh heap vector per call; caller is responsible for its lifetime.  */
572 static VEC(vn_reference_op_s, heap) *
573 create_reference_ops_from_ref (tree ref)
575 VEC (vn_reference_op_s, heap) *result = NULL;
577 copy_reference_ops_from_ref (ref, &result);
581 static VEC(vn_reference_op_s, heap) *shared_lookup_references;
583 /* Create a vector of vn_reference_op_s structures from REF, a
584 REFERENCE_CLASS_P tree. The vector is shared among all callers of
/* Shared-buffer variant: the returned vector is overwritten by the
   next call, so callers must not retain it.  */
587 static VEC(vn_reference_op_s, heap) *
588 shared_reference_ops_from_ref (tree ref)
592 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
593 copy_reference_ops_from_ref (ref, &shared_lookup_references);
594 return shared_lookup_references;
598 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
599 structures into their value numbers. This is done in-place, and
600 the vector passed in is returned. */
602 static VEC (vn_reference_op_s, heap) *
603 valueize_refs (VEC (vn_reference_op_s, heap) *orig)
605 vn_reference_op_t vro;
/* Only op0 is valueized; an op whose opcode is SSA_NAME stores the
   name in op0 as well, hence the first half of the test.  */
608 for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
610 if (vro->opcode == SSA_NAME
611 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
612 vro->op0 = SSA_VAL (vro->op0);
618 /* Transform any SSA_NAME's in ORIG, a vector of vuse trees, into
619 their value numbers. This is done in-place, and the vector passed
622 static VEC (tree, gc) *
623 valueize_vuses (VEC (tree, gc) *orig)
625 bool made_replacement = false;
629 for (i = 0; VEC_iterate (tree, orig, i, vuse); i++)
/* A vuse whose value number differs from itself gets replaced by
   that value number.  */
631 if (vuse != SSA_VAL (vuse))
633 made_replacement = true;
634 VEC_replace (tree, orig, i, SSA_VAL (vuse));
/* Replacements can break the sorted-by-version invariant, so the
   vector is presumably re-sorted here -- the line is missing from
   this extract; confirm against the full file.  */
638 if (made_replacement && VEC_length (tree, orig) > 1)
644 /* Lookup OP in the current hash table, and return the resulting
645 value number if it exists in the hash table. Return NULL_TREE if
646 it does not exist in the hash table. */
/* Uses the shared reference-ops buffer, so the probe key is transient;
   only the stored result is returned.  */
649 vn_reference_lookup (tree op, VEC (tree, gc) *vuses)
652 struct vn_reference_s vr1;
654 vr1.vuses = valueize_vuses (vuses);
655 vr1.operands = valueize_refs (shared_reference_ops_from_ref (op));
656 vr1.hashcode = vn_reference_compute_hash (&vr1);
657 slot = htab_find_slot_with_hash (current_info->references, &vr1, vr1.hashcode,
662 return ((vn_reference_t)*slot)->result;
665 /* Insert OP into the current hash table with a value number of
/* Pool-allocates a persistent entry; note the non-shared
   create_reference_ops_from_ref so the stored operands outlive the call.  */
669 vn_reference_insert (tree op, tree result, VEC (tree, gc) *vuses)
674 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
676 vr1->vuses = valueize_vuses (vuses);
677 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
678 vr1->hashcode = vn_reference_compute_hash (vr1);
/* Store the value number of RESULT, not RESULT itself, when it is
   an SSA name.  */
679 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
681 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
684 /* Because we lookup stores using vuses, and value number failures
685 using the vdefs (see visit_reference_op_store for how and why),
686 it's possible that on failure we may try to insert an already
687 inserted store. This is not wrong, there is no ssa name for a
688 store that we could use as a differentiator anyway. Thus, unlike
689 the other lookup functions, you cannot gcc_assert (!*slot)
697 /* Return the stored hashcode for a unary operation. */
700 vn_unary_op_hash (const void *p1)
702 const_vn_unary_op_t const vuo1 = (const_vn_unary_op_t) p1;
703 return vuo1->hashcode;
706 /* Hash a unary operation P1 and return the result. */
708 static inline hashval_t
709 vn_unary_op_compute_hash (const vn_unary_op_t vuo1)
711 return iterative_hash_expr (vuo1->op0, vuo1->opcode)
714 /* Return true if P1 and P2, two unary operations, are equivalent. */
717 vn_unary_op_eq (const void *p1, const void *p2)
719 const_vn_unary_op_t const vuo1 = (const_vn_unary_op_t) p1;
720 const_vn_unary_op_t const vuo2 = (const_vn_unary_op_t) p2;
721 return vuo1->opcode == vuo2->opcode
722 && vuo1->type == vuo2->type
723 && expressions_equal_p (vuo1->op0, vuo2->op0);
726 /* Lookup OP in the current hash table, and return the resulting
727 value number if it exists in the hash table. Return NULL_TREE if
728 it does not exist in the hash table. */
/* The probe key is built on the stack; the operand is canonicalized to
   its value number before hashing so equal values collide.  */
731 vn_unary_op_lookup (tree op)
734 struct vn_unary_op_s vuo1;
736 vuo1.opcode = TREE_CODE (op);
737 vuo1.type = TREE_TYPE (op);
738 vuo1.op0 = TREE_OPERAND (op, 0);
740 if (TREE_CODE (vuo1.op0) == SSA_NAME)
741 vuo1.op0 = SSA_VAL (vuo1.op0);
743 vuo1.hashcode = vn_unary_op_compute_hash (&vuo1);
744 slot = htab_find_slot_with_hash (current_info->unary, &vuo1, vuo1.hashcode,
748 return ((vn_unary_op_t)*slot)->result;
751 /* Insert OP into the current hash table with a value number of
/* Mirror of the lookup path, but the entry is pool-allocated so it
   persists in the table.  */
755 vn_unary_op_insert (tree op, tree result)
758 vn_unary_op_t vuo1 = (vn_unary_op_t) pool_alloc (current_info->unary_op_pool);
760 vuo1->opcode = TREE_CODE (op);
761 vuo1->type = TREE_TYPE (op);
762 vuo1->op0 = TREE_OPERAND (op, 0);
763 vuo1->result = result;
765 if (TREE_CODE (vuo1->op0) == SSA_NAME)
766 vuo1->op0 = SSA_VAL (vuo1->op0);
768 vuo1->hashcode = vn_unary_op_compute_hash (vuo1);
769 slot = htab_find_slot_with_hash (current_info->unary, vuo1, vuo1->hashcode,
775 /* Compute and return the hash value for binary operation VBO1. */
777 static inline hashval_t
778 vn_binary_op_compute_hash (const vn_binary_op_t vbo1)
780 return iterative_hash_expr (vbo1->op0, vbo1->opcode)
781 + iterative_hash_expr (vbo1->op1, vbo1->opcode)
784 /* Return the computed hashcode for binary operation P1. */
787 vn_binary_op_hash (const void *p1)
789 const_vn_binary_op_t const vbo1 = (const_vn_binary_op_t) p1;
790 return vbo1->hashcode;
793 /* Compare binary operations P1 and P2 and return true if they are
797 vn_binary_op_eq (const void *p1, const void *p2)
799 const_vn_binary_op_t const vbo1 = (const_vn_binary_op_t) p1;
800 const_vn_binary_op_t const vbo2 = (const_vn_binary_op_t) p2;
801 return vbo1->opcode == vbo2->opcode
802 && vbo1->type == vbo2->type
803 && expressions_equal_p (vbo1->op0, vbo2->op0)
804 && expressions_equal_p (vbo1->op1, vbo2->op1);
807 /* Lookup OP in the current hash table, and return the resulting
808 value number if it exists in the hash table. Return NULL_TREE if
809 it does not exist in the hash table. */
812 vn_binary_op_lookup (tree op)
815 struct vn_binary_op_s vbo1;
817 vbo1.opcode = TREE_CODE (op);
818 vbo1.type = TREE_TYPE (op);
819 vbo1.op0 = TREE_OPERAND (op, 0);
820 vbo1.op1 = TREE_OPERAND (op, 1);
/* Canonicalize both operands to their value numbers first ...  */
822 if (TREE_CODE (vbo1.op0) == SSA_NAME)
823 vbo1.op0 = SSA_VAL (vbo1.op0);
824 if (TREE_CODE (vbo1.op1) == SSA_NAME)
825 vbo1.op1 = SSA_VAL (vbo1.op1);
/* ... then canonicalize commutative-operand order so a+b and b+a
   hash and compare identically.  */
827 if (tree_swap_operands_p (vbo1.op0, vbo1.op1, false)
828 && commutative_tree_code (vbo1.opcode))
830 tree temp = vbo1.op0;
835 vbo1.hashcode = vn_binary_op_compute_hash (&vbo1);
836 slot = htab_find_slot_with_hash (current_info->binary, &vbo1, vbo1.hashcode,
840 return ((vn_binary_op_t)*slot)->result;
843 /* Insert OP into the current hash table with a value number of
/* Must apply the exact same canonicalization as the lookup path,
   otherwise insert and lookup would disagree on the key.  */
847 vn_binary_op_insert (tree op, tree result)
851 vbo1 = (vn_binary_op_t) pool_alloc (current_info->binary_op_pool);
853 vbo1->opcode = TREE_CODE (op);
854 vbo1->type = TREE_TYPE (op);
855 vbo1->op0 = TREE_OPERAND (op, 0);
856 vbo1->op1 = TREE_OPERAND (op, 1);
857 vbo1->result = result;
859 if (TREE_CODE (vbo1->op0) == SSA_NAME)
860 vbo1->op0 = SSA_VAL (vbo1->op0);
861 if (TREE_CODE (vbo1->op1) == SSA_NAME)
862 vbo1->op1 = SSA_VAL (vbo1->op1);
864 if (tree_swap_operands_p (vbo1->op0, vbo1->op1, false)
865 && commutative_tree_code (vbo1->opcode))
867 tree temp = vbo1->op0;
868 vbo1->op0 = vbo1->op1;
871 vbo1->hashcode = vn_binary_op_compute_hash (vbo1);
872 slot = htab_find_slot_with_hash (current_info->binary, vbo1, vbo1->hashcode,
879 /* Compute a hashcode for PHI operation VP1 and return it. */
/* Seeds the hash with the block index, then folds in each non-VN_TOP
   argument (VN_TOP args are skipped so "unknown" positions never
   perturb the hash).  */
881 static inline hashval_t
882 vn_phi_compute_hash (vn_phi_t vp1)
884 hashval_t result = 0;
888 result = vp1->block->index;
890 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
892 if (phi1op == VN_TOP)
894 result += iterative_hash_expr (phi1op, result);
900 /* Return the computed hashcode for phi operation P1. */
903 vn_phi_hash (const void *p1)
905 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
906 return vp1->hashcode;
909 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
/* Phis in different blocks are never equivalent; within a block the
   argument order matches edge order, so positional compare is valid.  */
912 vn_phi_eq (const void *p1, const void *p2)
914 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
915 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
917 if (vp1->block == vp2->block)
922 /* Any phi in the same block will have it's arguments in the
923 same edge order, because of how we store phi nodes. */
924 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
926 tree phi2op = VEC_index (tree, vp2->phiargs, i);
/* VN_TOP is equivalent to anything, so a VN_TOP on either side
   does not disqualify equality at this position.  */
927 if (phi1op == VN_TOP || phi2op == VN_TOP)
929 if (!expressions_equal_p (phi1op, phi2op))
/* Scratch argument vector reused by vn_phi_lookup across calls.  */
937 static VEC(tree, heap) *shared_lookup_phiargs;
939 /* Lookup PHI in the current hash table, and return the resulting
940 value number if it exists in the hash table. Return NULL_TREE if
941 it does not exist in the hash table. */
944 vn_phi_lookup (tree phi)
950 VEC_truncate (tree, shared_lookup_phiargs, 0);
952 /* Canonicalize the SSA_NAME's to their value number. */
953 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
955 tree def = PHI_ARG_DEF (phi, i);
956 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
957 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
959 vp1.phiargs = shared_lookup_phiargs;
960 vp1.block = bb_for_stmt (phi);
961 vp1.hashcode = vn_phi_compute_hash (&vp1);
962 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
966 return ((vn_phi_t)*slot)->result;
969 /* Insert PHI into the current hash table with a value number of
/* Unlike lookup, builds a private argument vector so the stored entry
   does not alias the shared scratch buffer.  */
973 vn_phi_insert (tree phi, tree result)
976 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
978 VEC (tree, heap) *args = NULL;
980 /* Canonicalize the SSA_NAME's to their value number. */
981 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
983 tree def = PHI_ARG_DEF (phi, i);
984 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
985 VEC_safe_push (tree, heap, args, def);
988 vp1->block = bb_for_stmt (phi);
989 vp1->result = result;
990 vp1->hashcode = vn_phi_compute_hash (vp1);
992 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
995 /* Because we iterate over phi operations more than once, it's
996 possible the slot might already exist here, hence no assert.*/
1001 /* Print set of components in strongly connected component SCC to OUT. */
1004 print_scc (FILE *out, VEC (tree, heap) *scc)
1009 fprintf (out, "SCC consists of: ");
1010 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1012 print_generic_expr (out, var, 0);
1015 fprintf (out, "\n");
1018 /* Set the value number of FROM to TO, return true if it has changed
/* Core lattice-update primitive: every value-number change in this pass
   funnels through here, and the changed/unchanged return value drives
   the SCC iteration to a fixed point.  */
1022 set_ssa_val_to (tree from, tree to)
1026 /* The only thing we allow as value numbers are VN_TOP, ssa_names
1027 and invariants. So assert that here. */
1028 gcc_assert (to != NULL_TREE
1030 || TREE_CODE (to) == SSA_NAME
1031 || is_gimple_min_invariant (to)));
1033 if (dump_file && (dump_flags & TDF_DETAILS))
1035 fprintf (dump_file, "Setting value number of ");
1036 print_generic_expr (dump_file, from, 0);
1037 fprintf (dump_file, " to ");
1038 print_generic_expr (dump_file, to, 0);
1039 fprintf (dump_file, "\n");
1042 currval = SSA_VAL (from);
/* Only count it as a change when the new value differs structurally
   from the current one (OEP_PURE_SAME tolerates pure-flag mismatch).  */
1044 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
1046 SSA_VAL (from) = to;
1052 /* Set all definitions in STMT to value number to themselves.
1053 Return true if a value number changed. */
/* "Varying" fallback: a def that values to itself carries no
   equivalence information.  */
1056 defs_to_varying (tree stmt)
1058 bool changed = false;
1062 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
1064 tree def = DEF_FROM_PTR (defp);
1066 VN_INFO (def)->use_processed = true;
1067 changed |= set_ssa_val_to (def, def);
1072 /* Visit a copy between LHS and RHS, return true if the value number
1076 visit_copy (tree lhs, tree rhs)
1079 /* Follow chains of copies to their destination. */
/* Terminates because a canonical value numbers to itself; stops when
   the value is a non-SSA (invariant) as well.  */
1080 while (SSA_VAL (rhs) != rhs && TREE_CODE (SSA_VAL (rhs)) == SSA_NAME)
1081 rhs = SSA_VAL (rhs);
1083 /* The copy may have a more interesting constant filled expression
1084 (we don't, since we know our RHS is just an SSA name). */
1085 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
1086 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
1088 return set_ssa_val_to (lhs, rhs);
1091 /* Visit a unary operator RHS, value number it, and return true if the
1092 value number of LHS has changed as a result. */
/* Hit: reuse the table's value number.  Miss: LHS becomes its own
   value and the expression is recorded for future hits.  */
1095 visit_unary_op (tree lhs, tree op)
1097 bool changed = false;
1098 tree result = vn_unary_op_lookup (op);
1102 changed = set_ssa_val_to (lhs, result);
1106 changed = set_ssa_val_to (lhs, lhs);
1107 vn_unary_op_insert (op, lhs);
1113 /* Visit a binary operator RHS, value number it, and return true if the
1114 value number of LHS has changed as a result. */
/* Identical lookup-or-insert structure to visit_unary_op.  */
1117 visit_binary_op (tree lhs, tree op)
1119 bool changed = false;
1120 tree result = vn_binary_op_lookup (op);
1124 changed = set_ssa_val_to (lhs, result);
1128 changed = set_ssa_val_to (lhs, lhs);
1129 vn_binary_op_insert (op, lhs);
1135 /* Visit a load from a reference operator RHS, part of STMT, value number it,
1136 and return true if the value number of the LHS has changed as a result. */
/* Lookup uses the shared (transient) vuse vector; insert uses a fresh
   copy since the table entry must persist.  */
1139 visit_reference_op_load (tree lhs, tree op, tree stmt)
1141 bool changed = false;
1142 tree result = vn_reference_lookup (op, shared_vuses_from_stmt (stmt));
1146 changed = set_ssa_val_to (lhs, result);
1150 changed = set_ssa_val_to (lhs, lhs);
1151 vn_reference_insert (op, lhs, copy_vuses_from_stmt (stmt));
1158 /* Visit a store to a reference operator LHS, part of STMT, value number it,
1159 and return true if the value number of the LHS has changed as a result. */
1162 visit_reference_op_store (tree lhs, tree op, tree stmt)
1164 bool changed = false;
1166 bool resultsame = false;
1168 /* First we want to lookup using the *vuses* from the store and see
1169 if there the last store to this location with the same address
1172 The vuses represent the memory state before the store. If the
1173 memory state, address, and value of the store is the same as the
1174 last store to this location, then this store will produce the
1175 same memory state as that store.
1177 In this case the vdef versions for this store are value numbered to those
1178 vuse versions, since they represent the same memory state after
1181 Otherwise, the vdefs for the store are used when inserting into
1182 the table, since the store generates a new memory state. */
1184 result = vn_reference_lookup (lhs, shared_vuses_from_stmt (stmt));
1188 if (TREE_CODE (result) == SSA_NAME)
1189 result = SSA_VAL (result);
/* The store is redundant iff the previously stored value equals the
   value being stored now.  */
1190 resultsame = expressions_equal_p (result, op);
1193 if (!result || !resultsame)
1195 VEC(tree, gc) *vdefs = copy_vdefs_from_stmt (stmt);
1199 if (dump_file && (dump_flags & TDF_DETAILS))
1201 fprintf (dump_file, "No store match\n");
1202 fprintf (dump_file, "Value numbering store ");
1203 print_generic_expr (dump_file, lhs, 0);
1204 fprintf (dump_file, " to ");
1205 print_generic_expr (dump_file, op, 0);
1206 fprintf (dump_file, "\n");
1208 /* Have to set value numbers before insert, since insert is
1209 going to valueize the references in-place. */
1210 for (i = 0; VEC_iterate (tree, vdefs, i, vdef); i++)
1212 VN_INFO (vdef)->use_processed = true;
1213 changed |= set_ssa_val_to (vdef, vdef);
1216 vn_reference_insert (lhs, op, vdefs);
1220 /* We had a match, so value number the vdefs to have the value
1221 number of the vuses they came from. */
1222 ssa_op_iter op_iter;
1226 if (dump_file && (dump_flags & TDF_DETAILS))
1227 fprintf (dump_file, "Store matched earlier value,"
1228 "value numbering store vdefs to matching vuses.\n");
1230 FOR_EACH_SSA_VDEF_OPERAND (var, vv, stmt, op_iter)
1232 tree def = DEF_FROM_PTR (var);
1235 /* Uh, if the vuse is a multiuse, we can't really do much
1236 here, sadly, since we don't know which value number of
1237 which vuse to use. */
1238 if (VUSE_VECT_NUM_ELEM (*vv) != 1)
1241 use = VUSE_ELEMENT_VAR (*vv, 0);
1243 VN_INFO (def)->use_processed = true;
1244 changed |= set_ssa_val_to (def, SSA_VAL (use));
1251 /* Visit and value number PHI, return true if the value number
1255 visit_phi (tree phi)
1257 bool changed = false;
1259 tree sameval = VN_TOP;
1260 bool allsame = true;
1263 /* See if all non-TOP arguments have the same value. TOP is
1264 equivalent to everything, so we can ignore it. */
1265 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
1267 tree def = PHI_ARG_DEF (phi, i);
1269 if (TREE_CODE (def) == SSA_NAME)
1270 def = SSA_VAL (def);
1273 if (sameval == VN_TOP)
1279 if (!expressions_equal_p (def, sameval))
1287 /* If all value numbered to the same value, the phi node has that
1291 if (is_gimple_min_invariant (sameval))
1293 VN_INFO (PHI_RESULT (phi))->has_constants = true;
1294 VN_INFO (PHI_RESULT (phi))->expr = sameval;
1298 VN_INFO (PHI_RESULT (phi))->has_constants = false;
1299 VN_INFO (PHI_RESULT (phi))->expr = sameval;
1302 if (TREE_CODE (sameval) == SSA_NAME)
1303 return visit_copy (PHI_RESULT (phi), sameval);
1305 return set_ssa_val_to (PHI_RESULT (phi), sameval);
1308 /* Otherwise, see if it is equivalent to a phi node in this block. */
1309 result = vn_phi_lookup (phi);
1312 if (TREE_CODE (result) == SSA_NAME)
1313 changed = visit_copy (PHI_RESULT (phi), result);
1315 changed = set_ssa_val_to (PHI_RESULT (phi), result);
1319 vn_phi_insert (phi, PHI_RESULT (phi));
1320 VN_INFO (PHI_RESULT (phi))->has_constants = false;
1321 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
1322 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
1328 /* Return true if EXPR contains constants. */
/* NOTE(review): the case labels for the unary/binary tcc classes are
   elided in this excerpt; the grouping below is inferred from the
   operand counts and should be confirmed against the full file.  */
1331 expr_has_constants (tree expr)
1333 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
/* Single-operand expression: only operand 0 can be constant.  */
1336 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
/* Two-operand expression: either operand may be constant.  */
1339 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
1340 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
1341 /* Constants inside reference ops are rarely interesting, but
1342 it can take a lot of looking to find them. */
1344 case tcc_declaration:
/* Fall back to asking whether EXPR itself is a gimple invariant.  */
1347 return is_gimple_min_invariant (expr);
1352 /* Replace SSA_NAMES in expr with their value numbers, and return the
1354 This is performed in place. */
1357 valueize_expr (tree expr)
1359 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
/* Unary case: substitute operand 0's value number, but never
   substitute VN_TOP (a not-yet-processed name).  */
1362 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
1363 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
1364 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
/* Binary case: same substitution for both operands.  */
1367 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
1368 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
1369 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
1370 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
1371 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
1372 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
1380 /* Simplify the binary expression RHS, and return the result if
1384 simplify_binary_expression (tree rhs)
1386 tree result = NULL_TREE;
1387 tree op0 = TREE_OPERAND (rhs, 0);
1388 tree op1 = TREE_OPERAND (rhs, 1);
1390 /* This will not catch every single case we could combine, but will
1391 catch those with constants. The goal here is to simultaneously
1392 combine constants between expressions, but avoid infinite
1393 expansion of expressions during simplification. */
/* For an SSA-name operand, prefer its constant-bearing expression
   (valueized in place); otherwise use its value number, skipping
   VN_TOP and self-valued names.  */
1394 if (TREE_CODE (op0) == SSA_NAME)
1396 if (VN_INFO (op0)->has_constants)
1397 op0 = valueize_expr (VN_INFO (op0)->expr);
1398 else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
1399 op0 = SSA_VAL (op0);
/* Same treatment for the second operand.  */
1402 if (TREE_CODE (op1) == SSA_NAME)
1404 if (VN_INFO (op1)->has_constants)
1405 op1 = valueize_expr (VN_INFO (op1)->expr);
1406 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
1407 op1 = SSA_VAL (op1);
/* Let fold try to combine the (possibly substituted) operands.  */
1410 result = fold_binary (TREE_CODE (rhs), TREE_TYPE (rhs), op0, op1);
1412 /* Make sure result is not a complex expression consisting
1413 of operators of operators (IE (a + b) + (a + c))
1414 Otherwise, we will end up with unbounded expressions if
1415 fold does anything at all. */
1416 if (result && valid_gimple_expression_p (result))
1422 /* Try to simplify RHS using equivalences and constant folding. */
1425 try_to_simplify (tree stmt, tree rhs)
/* An SSA-name RHS simplifies directly to its value number when that
   is a constant, or to its recorded constant-bearing expression.  */
1427 if (TREE_CODE (rhs) == SSA_NAME)
1429 if (is_gimple_min_invariant (SSA_VAL (rhs)))
1430 return SSA_VAL (rhs);
1431 else if (VN_INFO (rhs)->has_constants)
1432 return VN_INFO (rhs)->expr;
1436 switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
1438 /* For references, see if we find a result for the lookup,
1439 and use it if we do. */
1440 case tcc_declaration:
1441 /* Pull out any truly constant values. */
1442 if (TREE_READONLY (rhs)
1443 && TREE_STATIC (rhs)
1444 && DECL_INITIAL (rhs)
1445 && valid_gimple_expression_p (DECL_INITIAL (rhs)))
1446 return DECL_INITIAL (rhs);
/* Reference class: look the reference up using the statement's
   shared vuse vector.  */
1451 tree result = vn_reference_lookup (rhs,
1452 shared_vuses_from_stmt (stmt));
1457 /* We could do a little more with unary ops, if they expand
1458 into binary ops, but it's debatable whether it is worth it. */
1461 tree result = NULL_TREE;
1462 tree op0 = TREE_OPERAND (rhs, 0);
1463 if (TREE_CODE (op0) == SSA_NAME && VN_INFO (op0)->has_constants)
1464 op0 = VN_INFO (op0)->expr;
1465 else if (TREE_CODE (op0) == SSA_NAME && SSA_VAL (op0) != op0)
1466 op0 = SSA_VAL (op0);
1467 result = fold_unary (TREE_CODE (rhs), TREE_TYPE (rhs), op0);
/* Comparisons and (elided here, presumably) binary ops share the
   binary simplification path.  */
1472 case tcc_comparison:
1474 return simplify_binary_expression (rhs);
1483 /* Visit and value number USE, return true if the value number
/* NOTE(review): many structural lines (braces, case labels, returns)
   are elided in this excerpt; added comments describe only the visible
   statements.  */
1487 visit_use (tree use)
1489 bool changed = false;
1490 tree stmt = SSA_NAME_DEF_STMT (use);
1493 VN_INFO (use)->use_processed = true;
1495 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
1496 if (dump_file && (dump_flags & TDF_DETAILS))
1498 fprintf (dump_file, "Value numbering ");
1499 print_generic_expr (dump_file, use, 0);
1500 fprintf (dump_file, " stmt = ");
1501 print_generic_stmt (dump_file, stmt, 0);
1504 /* RETURN_EXPR may have an embedded MODIFY_STMT. */
1505 if (TREE_CODE (stmt) == RETURN_EXPR
1506 && TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT)
1507 stmt = TREE_OPERAND (stmt, 0);
1509 ann = stmt_ann (stmt);
1511 /* Handle uninitialized uses. */
/* An empty defining statement means USE has no real definition;
   it values to itself.  */
1512 if (IS_EMPTY_STMT (stmt))
1514 changed = set_ssa_val_to (use, use);
1518 if (TREE_CODE (stmt) == PHI_NODE)
1520 changed = visit_phi (stmt);
/* Anything that is not a plain assignment, or that has volatile
   operands, defines only varying values.  */
1522 else if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
1523 || (ann && ann->has_volatile_ops))
1525 changed = defs_to_varying (stmt);
1529 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
1530 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
1533 STRIP_USELESS_TYPE_CONVERSION (rhs);
1535 /* Shortcut for copies. Simplifying copies is pointless,
1536 since we copy the expression and value they represent. */
1537 if (TREE_CODE (rhs) == SSA_NAME && TREE_CODE (lhs) == SSA_NAME)
1539 changed = visit_copy (lhs, rhs);
1542 simplified = try_to_simplify (stmt, rhs);
1543 if (simplified && simplified != rhs)
1545 if (dump_file && (dump_flags & TDF_DETAILS))
1547 fprintf (dump_file, "RHS ");
1548 print_generic_expr (dump_file, rhs, 0);
1549 fprintf (dump_file, " simplified to ");
1550 print_generic_expr (dump_file, simplified, 0);
1551 if (TREE_CODE (lhs) == SSA_NAME)
1552 fprintf (dump_file, " has constants %d\n",
1553 VN_INFO (lhs)->has_constants);
1555 fprintf (dump_file, "\n");
1559 /* Setting value numbers to constants will occasionally
1560 screw up phi congruence because constants are not
1561 uniquely associated with a single ssa name that can be
/* Simplified to a constant: record it and set the value number.  */
1563 if (simplified && is_gimple_min_invariant (simplified)
1564 && TREE_CODE (lhs) == SSA_NAME
1565 && simplified != rhs)
1567 VN_INFO (lhs)->expr = simplified;
1568 VN_INFO (lhs)->has_constants = true;
1569 changed = set_ssa_val_to (lhs, simplified);
/* Simplified to another SSA name: treat as a copy.  */
1572 else if (simplified && TREE_CODE (simplified) == SSA_NAME
1573 && TREE_CODE (lhs) == SSA_NAME)
1575 changed = visit_copy (lhs, simplified);
/* Simplified to some other expression: cache an unshared copy.  */
1578 else if (simplified)
1580 if (TREE_CODE (lhs) == SSA_NAME)
1582 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
1583 /* We have to unshare the expression or else
1584 valuizing may change the IL stream. */
1585 VN_INFO (lhs)->expr = unshare_expr (simplified);
1589 else if (expr_has_constants (rhs) && TREE_CODE (lhs) == SSA_NAME)
1591 VN_INFO (lhs)->has_constants = true;
1592 VN_INFO (lhs)->expr = unshare_expr (rhs);
1594 else if (TREE_CODE (lhs) == SSA_NAME)
1596 /* We reset expr and constantness here because we may
1597 have been value numbering optimistically, and
1598 iterating. They may become non-constant in this case,
1599 even if they were optimistically constant. */
1601 VN_INFO (lhs)->has_constants = false;
1602 VN_INFO (lhs)->expr = lhs;
/* Abnormal-PHI names cannot be safely propagated; go varying.  */
1605 if (TREE_CODE (lhs) == SSA_NAME
1606 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
1607 changed = defs_to_varying (stmt);
1608 else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
1610 changed = visit_reference_op_store (lhs, rhs, stmt);
1612 else if (TREE_CODE (lhs) == SSA_NAME)
1614 if (is_gimple_min_invariant (rhs))
1616 VN_INFO (lhs)->has_constants = true;
1617 VN_INFO (lhs)->expr = rhs;
1618 changed = set_ssa_val_to (lhs, rhs);
/* Dispatch on the tcc class of the RHS.  */
1622 switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
1625 changed = visit_unary_op (lhs, rhs);
1628 changed = visit_binary_op (lhs, rhs);
1630 /* If tcc_vl_expr ever encompasses more than
1631 CALL_EXPR, this will need to be changed. */
/* Only pure/const calls can be value numbered as loads;
   everything else goes varying.  */
1633 if (call_expr_flags (rhs) & (ECF_PURE | ECF_CONST))
1634 changed = visit_reference_op_load (lhs, rhs, stmt);
1636 changed = defs_to_varying (stmt);
1638 case tcc_declaration:
1640 changed = visit_reference_op_load (lhs, rhs, stmt);
1642 case tcc_expression:
/* ADDR_EXPR is the only tcc_expression handled as a unary op.  */
1643 if (TREE_CODE (rhs) == ADDR_EXPR)
1645 changed = visit_unary_op (lhs, rhs);
1650 changed = defs_to_varying (stmt);
1656 changed = defs_to_varying (stmt);
1663 /* Compare two operands by reverse postorder index */
/* qsort comparator over tree* elements; totally orders SSA names by
   the position of their defining statements.  NOTE(review): the
   return statements for several branches are elided in this excerpt.  */
1666 compare_ops (const void *pa, const void *pb)
1668 const tree opa = *((const tree *)pa);
1669 const tree opb = *((const tree *)pb);
1670 tree opstmta = SSA_NAME_DEF_STMT (opa);
1671 tree opstmtb = SSA_NAME_DEF_STMT (opb);
/* Names with empty (no real) defining statements sort first.  */
1675 if (IS_EMPTY_STMT (opstmta) && IS_EMPTY_STMT (opstmtb))
1677 else if (IS_EMPTY_STMT (opstmta))
1679 else if (IS_EMPTY_STMT (opstmtb))
1682 bba = bb_for_stmt (opstmta);
1683 bbb = bb_for_stmt (opstmtb);
/* Same basic block: PHIs come before real statements, and real
   statements are ordered by their annotation uid.  */
1694 if (TREE_CODE (opstmta) == PHI_NODE && TREE_CODE (opstmtb) == PHI_NODE)
1696 else if (TREE_CODE (opstmta) == PHI_NODE)
1698 else if (TREE_CODE (opstmtb) == PHI_NODE)
1700 return stmt_ann (opstmta)->uid - stmt_ann (opstmtb)->uid;
/* Different blocks: order by the blocks' RPO numbers.  */
1702 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
1705 /* Sort an array containing members of a strongly connected component
1706 SCC so that the members are ordered by RPO number.
1707 This means that when the sort is complete, iterating through the
1708 array will give you the members in RPO order. */
1711 sort_scc (VEC (tree, heap) *scc)
/* Sort the VEC's underlying array in place using compare_ops
   (the element size and comparator arguments are elided here).  */
1713 qsort (VEC_address (tree, scc),
1714 VEC_length (tree, scc),
1719 /* Process a strongly connected component in the SSA graph. */
1722 process_scc (VEC (tree, heap) *scc)
1724 /* If the SCC has a single member, just visit it. */
1726 if (VEC_length (tree, scc) == 1)
1728 tree use = VEC_index (tree, scc, 0);
1729 if (!VN_INFO (use)->use_processed)
/* Multi-member SCC: iterate to a fixed point.  */
1736 unsigned int iterations = 0;
1737 bool changed = true;
1739 /* Iterate over the SCC with the optimistic table until it stops
/* Optimistic phase: assume values stay equal until proven otherwise,
   revisiting every member while anything changes.  */
1741 current_info = optimistic_info;
1746 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1747 changed |= visit_use (var);
1750 if (dump_file && (dump_flags & TDF_STATS))
1751 fprintf (dump_file, "Processing SCC required %d iterations\n",
1754 /* Finally, visit the SCC once using the valid table. */
1755 current_info = valid_info;
1756 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1761 /* Depth first search on NAME to discover and process SCC's in the SSA
1763 Execution of this algorithm relies on the fact that the SCC's are
1764 popped off the stack in topological order. */
/* NOTE(review): the function signature is elided from this excerpt;
   the body below implements Tarjan-style SCC discovery (dfsnum/low/
   on_sccstack) over SSA use-def edges — confirm the exact prototype
   against the full file.  */
/* Assign a fresh DFS number and initialize the low-link to it.  */
1774 VN_INFO (name)->dfsnum = next_dfs_num++;
1775 VN_INFO (name)->visited = true;
1776 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
/* Push NAME on the SCC stack.  */
1778 VEC_safe_push (tree, heap, sccstack, name);
1779 VN_INFO (name)->on_sccstack = true;
1780 defstmt = SSA_NAME_DEF_STMT (name);
1782 /* Recursively DFS on our operands, looking for SCC's. */
1783 if (!IS_EMPTY_STMT (defstmt))
1785 FOR_EACH_PHI_OR_STMT_USE (usep, SSA_NAME_DEF_STMT (name), iter,
1788 tree use = USE_FROM_PTR (usep);
1790 /* Since we handle phi nodes, we will sometimes get
1791 invariants in the use expression. */
1792 if (TREE_CODE (use) != SSA_NAME)
/* Tree edge: recurse (call elided), then fold the child's low-link
   into ours.  */
1795 if (! (VN_INFO (use)->visited))
1798 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
1799 VN_INFO (use)->low);
/* Back/cross edge to a node still on the stack: use its dfsnum.  */
1801 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
1802 && VN_INFO (use)->on_sccstack)
1804 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
1805 VN_INFO (name)->low);
1810 /* See if we found an SCC. */
/* NAME is an SCC root exactly when its low-link equals its dfsnum.  */
1811 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
1813 VEC (tree, heap) *scc = NULL;
1816 /* Found an SCC, pop the components off the SCC stack and
1820 x = VEC_pop (tree, sccstack);
1822 VN_INFO (x)->on_sccstack = false;
1823 VEC_safe_push (tree, heap, scc, x);
1824 } while (x != name);
/* Multi-member SCCs are sorted/dumped before processing
   (the sort_scc/process_scc calls are elided here).  */
1826 if (VEC_length (tree, scc) > 1)
1829 if (dump_file && (dump_flags & TDF_DETAILS))
1830 print_scc (dump_file, scc);
1834 VEC_free (tree, heap, scc);
/* NOTE(review): remnant of a hashtable free callback for PHI entries
   (signature elided) — releases the stored phiargs vector.  Confirm
   against the full file.  */
1842 VEC_free (tree, heap, phi->phiargs);
1846 /* Free a reference operation structure VP. */
/* htab free callback: VP is a vn_reference_t whose operand vector
   must be released.  */
1849 free_reference (void *vp)
1851 vn_reference_t vr = vp;
1852 VEC_free (vn_reference_op_s, heap, vr->operands);
1855 /* Allocate a value number table. */
/* Creates the four hashtables (phis, unary, binary, references) and
   their backing alloc pools; only the phi table (free_phi) and the
   reference table (del_f argument elided) get element destructors.  */
1858 allocate_vn_table (vn_tables_t table)
1860 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
1861 table->unary = htab_create (23, vn_unary_op_hash, vn_unary_op_eq, NULL);
1862 table->binary = htab_create (23, vn_binary_op_hash, vn_binary_op_eq, NULL);
1863 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
/* Pool allocators for the hashtable entries (block sizes elided).  */
1866 table->unary_op_pool = create_alloc_pool ("VN unary operations",
1867 sizeof (struct vn_unary_op_s),
1869 table->binary_op_pool = create_alloc_pool ("VN binary operations",
1870 sizeof (struct vn_binary_op_s),
1872 table->phis_pool = create_alloc_pool ("VN phis",
1873 sizeof (struct vn_phi_s),
1875 table->references_pool = create_alloc_pool ("VN references",
1876 sizeof (struct vn_reference_s),
1880 /* Free a value number table. */
/* Mirrors allocate_vn_table: deletes the hashtables (which run their
   element destructors) and then releases the alloc pools.  */
1883 free_vn_table (vn_tables_t table)
1885 htab_delete (table->phis);
1886 htab_delete (table->unary);
1887 htab_delete (table->binary);
1888 htab_delete (table->references);
1889 free_alloc_pool (table->unary_op_pool);
1890 free_alloc_pool (table->binary_op_pool);
1891 free_alloc_pool (table->phis_pool);
1892 free_alloc_pool (table->references_pool);
/* NOTE(review): the function signature is elided from this excerpt;
   this is the SCCVN initialization routine — it sets up per-SSA-name
   VN_INFO, RPO numbers, statement uids, and the two VN tables.
   Confirm the exact prototype against the full file.  */
1900 int *rpo_numbers_temp;
1904 calculate_dominance_info (CDI_DOMINATORS);
1908 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
1909 /* VEC_alloc doesn't actually grow it to the right size, it just
1910 preallocates the space to do so. */
1911 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
1912 shared_lookup_phiargs = NULL;
1913 shared_lookup_vops = NULL;
1914 shared_lookup_references = NULL;
1915 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
1916 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
1917 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
1919 /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
1920 the i'th block in RPO order is bb. We want to map bb's to RPO
1921 numbers, so we need to rearrange this array. */
1922 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
1923 rpo_numbers[rpo_numbers_temp[j]] = j;
1925 free (rpo_numbers_temp);
/* VN_TOP is a distinguished tree used as the "unknown/top" lattice
   value throughout this pass.  */
1927 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
1929 /* Create the VN_INFO structures, and initialize value numbers to
1931 for (i = 0; i < num_ssa_names; i++)
1933 tree name = ssa_name (i);
1936 VN_INFO_GET (name)->valnum = VN_TOP;
1937 VN_INFO (name)->expr = name;
/* Number every statement so compare_ops can order statements within
   a basic block by uid.  */
1943 block_stmt_iterator bsi;
1944 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1946 tree stmt = bsi_stmt (bsi);
1947 stmt_ann (stmt)->uid = id++;
1951 /* Create the valid and optimistic value numbering tables. */
1952 valid_info = XCNEW (struct vn_tables_s);
1953 allocate_vn_table (valid_info);
1954 optimistic_info = XCNEW (struct vn_tables_s);
1955 allocate_vn_table (optimistic_info);
/* Allocate a third VN table for use by PRE and make it current.  */
1960 switch_to_PRE_table (void)
1962 pre_info = XCNEW (struct vn_tables_s);
1963 allocate_vn_table (pre_info);
1964 current_info = pre_info;
/* NOTE(review): the function signature is elided from this excerpt;
   this is the SCCVN teardown routine, releasing everything the
   initialization routine above allocated.  Confirm the exact
   prototype against the full file.  */
1972 VEC_free (tree, heap, shared_lookup_phiargs);
1973 VEC_free (tree, gc, shared_lookup_vops);
1974 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
1975 XDELETEVEC (rpo_numbers);
1976 for (i = 0; i < num_ssa_names; i++)
1978 tree name = ssa_name (i);
1981 XDELETE (VN_INFO (name));
/* Clear any value handle left on the SSA name so later passes do not
   see stale VN results.  */
1982 if (SSA_NAME_VALUE (name) &&
1983 TREE_CODE (SSA_NAME_VALUE (name)) == VALUE_HANDLE)
1984 SSA_NAME_VALUE (name) = NULL;
1988 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
1989 VEC_free (tree, heap, sccstack);
1990 free_vn_table (valid_info);
1991 XDELETE (valid_info);
1992 free_vn_table (optimistic_info);
1993 XDELETE (optimistic_info);
/* pre_info only exists if switch_to_PRE_table was called (the guard
   is elided in this excerpt).  */
1996 free_vn_table (pre_info);
2008 current_info = valid_info;
2010 for (param = DECL_ARGUMENTS (current_function_decl);
2012 param = TREE_CHAIN (param))
2014 if (gimple_default_def (cfun, param) != NULL)
2016 tree def = gimple_default_def (cfun, param);
2017 SSA_VAL (def) = def;
2021 for (i = num_ssa_names - 1; i > 0; i--)
2023 tree name = ssa_name (i);
2025 && VN_INFO (name)->visited == false
2026 && !has_zero_uses (name))
2030 if (dump_file && (dump_flags & TDF_DETAILS))
2032 fprintf (dump_file, "Value numbers:\n");
2033 for (i = 0; i < num_ssa_names; i++)
2035 tree name = ssa_name (i);
2036 if (name && VN_INFO (name)->visited
2037 && (SSA_VAL (name) != name
2038 || is_gimple_min_invariant (VN_INFO (name)->expr)))
2040 print_generic_expr (dump_file, name, 0);
2041 fprintf (dump_file, " = ");
2042 if (is_gimple_min_invariant (VN_INFO (name)->expr))
2043 print_generic_expr (dump_file, VN_INFO (name)->expr, 0);
2045 print_generic_expr (dump_file, SSA_VAL (name), 0);
2046 fprintf (dump_file, "\n");