1 /* SCC value numbering for trees
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dan@dberlin.org>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to
20 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
21 Boston, MA 02110-1301, USA. */
25 #include "coretypes.h"
29 #include "basic-block.h"
30 #include "diagnostic.h"
31 #include "tree-inline.h"
32 #include "tree-flow.h"
33 #include "tree-gimple.h"
34 #include "tree-dump.h"
38 #include "tree-iterator.h"
40 #include "alloc-pool.h"
41 #include "tree-pass.h"
44 #include "langhooks.h"
46 #include "tree-ssa-propagate.h"
47 #include "tree-ssa-sccvn.h"
49 /* This algorithm is based on the SCC algorithm presented by Keith
50 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
51 (http://citeseer.ist.psu.edu/41805.html). In
52 straight line code, it is equivalent to a regular hash based value
53 numbering that is performed in reverse postorder.
55 For code with cycles, there are two alternatives, both of which
56 require keeping the hashtables separate from the actual list of
57 value numbers for SSA names.
59 1. Iterate value numbering in an RPO walk of the blocks, removing
60 all the entries from the hashtable after each iteration (but
61 keeping the SSA name->value number mapping between iterations).
62 Iterate until it does not change.
64 2. Perform value numbering as part of an SCC walk on the SSA graph,
65 iterating only the cycles in the SSA graph until they do not change
66 (using a separate, optimistic hashtable for value numbering the SCC
69 The second is not just faster in practice (because most SSA graph
70 cycles do not involve all the variables in the graph), it also has
73 One of these nice properties is that when we pop an SCC off the
74 stack, we are guaranteed to have processed all the operands coming from
75 *outside of that SCC*, so we do not need to do anything special to
76 ensure they have value numbers.
78 Another nice property is that the SCC walk is done as part of a DFS
79 of the SSA graph, which makes it easy to perform combining and
80 simplifying operations at the same time.
82 The code below is deliberately written in a way that makes it easy
83 to separate the SCC walk from the other work it does.
85 In order to propagate constants through the code, we track which
86 expressions contain constants, and use those while folding. In
87 theory, we could also track expressions whose value numbers are
88 replaced, in case we end up folding based on expression
91 In order to value number memory, we assign value numbers to vuses.
92 This enables us to note that, for example, stores to the same
93 address of the same value from the same starting memory states are
97 1. We can iterate only the changing portions of the SCC's, but
98 I have not seen an SCC big enough for this to be a win.
99 2. If you differentiate between phi nodes for loops and phi nodes
100 for if-then-else, you can properly consider phi nodes in different
101 blocks for equivalence.
102 3. We could value number vuses in more cases, particularly, whole
106 /* The set of hashtables and alloc_pool's for their items. */
108 typedef struct vn_tables_s
/* NOTE(review): the htab_t fields of this struct are elided in this
   listing; only the alloc_pools backing their entries are visible. */
114 alloc_pool unary_op_pool;
115 alloc_pool binary_op_pool;
116 alloc_pool phis_pool;
117 alloc_pool references_pool;
120 /* Binary operations in the hashtable consist of two operands, an
121 opcode, and a type. Result is the value number of the operation,
122 and hashcode is stored to avoid having to calculate it
125 typedef struct vn_binary_op_s
127 enum tree_code opcode;
/* NOTE(review): remaining fields (type, op0, op1, hashcode, result)
   are elided here -- see the accessors below that use them. */
135 /* Unary operations in the hashtable consist of a single operand, an
136 opcode, and a type. Result is the value number of the operation,
137 and hashcode is stored to avoid having to calculate it repeatedly. */
139 typedef struct vn_unary_op_s
141 enum tree_code opcode;
148 /* Phi nodes in the hashtable consist of their non-VN_TOP phi
149 arguments, and the basic block the phi is in. Result is the value
150 number of the operation, and hashcode is stored to avoid having to
151 calculate it repeatedly. Phi nodes not in the same block are never
152 considered equivalent. */
154 typedef struct vn_phi_s
156 VEC (tree, heap) *phiargs;
162 /* Reference operands only exist in reference operations structures.
163 They consist of an opcode, type, and some number of operands. For
164 a given opcode, some, all, or none of the operands may be used.
165 The operands are there to store the information that makes up the
166 portion of the addressing calculation that opcode performs. */
168 typedef struct vn_reference_op_struct
170 enum tree_code opcode;
176 typedef vn_reference_op_s *vn_reference_op_t;
178 DEF_VEC_O(vn_reference_op_s);
179 DEF_VEC_ALLOC_O(vn_reference_op_s, heap);
181 /* A reference operation in the hashtable is represented as a
182 collection of vuses, representing the memory state at the time of
183 the operation, and a collection of operands that make up the
184 addressing calculation. If two vn_reference_t's have the same set
185 of operands, they access the same memory location. We also store
186 the resulting value number, and the hashcode. The vuses are
187 always stored in order sorted by ssa name version. */
189 typedef struct vn_reference_s
191 VEC (tree, gc) *vuses;
192 VEC (vn_reference_op_s, heap) *operands;
197 /* Valid hashtables storing information we have proven to be
200 static vn_tables_t valid_info;
202 /* Optimistic hashtables storing information we are making assumptions about
203 during iterations. */
205 static vn_tables_t optimistic_info;
207 /* PRE hashtables storing information about mapping from expressions to
210 static vn_tables_t pre_info;
212 /* Pointer to the set of hashtables that is currently being used.
213 Should always point to either the optimistic_info, or the
216 static vn_tables_t current_info;
219 /* Reverse post order index for each basic block. */
221 static int *rpo_numbers;
/* Shorthand for the current value number of SSA name X. */
223 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
225 /* This represents the top of the VN lattice, which is the universal
230 /* Next DFS number and the stack for strongly connected component
233 static unsigned int next_dfs_num;
234 static VEC (tree, heap) *sccstack;
236 DEF_VEC_P(vn_ssa_aux_t);
237 DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);
239 /* Table of vn_ssa_aux_t's, one per ssa_name, indexed by
   SSA_NAME_VERSION. */
241 static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
243 /* Return the value numbering information for a given SSA name. */
248 return VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
249 SSA_NAME_VERSION (name));
252 /* Set the value numbering info for a given SSA name to a given
   value.  Overwrites any existing entry in the table. */
256 VN_INFO_SET (tree name, vn_ssa_aux_t value)
258 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
259 SSA_NAME_VERSION (name), value);
262 /* Get the value numbering info for a given SSA name, creating it if
263 it does not exist. */
266 VN_INFO_GET (tree name)
/* Zero-initialized so has_constants etc. start out false. */
268 vn_ssa_aux_t newinfo = XCNEW (struct vn_ssa_aux);
/* Grow the table on demand so the version number is a valid index. */
269 if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
270 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
271 SSA_NAME_VERSION (name) + 1);
272 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
273 SSA_NAME_VERSION (name), newinfo);
278 /* Compare two reference operands P1 and P2 for equality. Return true if
279 they are equal, and false otherwise. */
282 vn_reference_op_eq (const void *p1, const void *p2)
284 const vn_reference_op_t vro1 = (vn_reference_op_t) p1;
285 const vn_reference_op_t vro2 = (vn_reference_op_t) p2;
/* NOTE(review): types are compared by pointer identity here, so
   structurally-identical but distinct type nodes compare unequal. */
286 return vro1->opcode == vro2->opcode
287 && vro1->type == vro2->type
288 && expressions_equal_p (vro1->op0, vro2->op0)
289 && expressions_equal_p (vro1->op1, vro2->op1)
290 && expressions_equal_p (vro1->op2, vro2->op2);
293 /* Compute the hash for a reference operand VRO1. */
296 vn_reference_op_compute_hash (const vn_reference_op_t vro1)
298 return iterative_hash_expr (vro1->op0, vro1->opcode)
299 + iterative_hash_expr (vro1->op1, vro1->opcode)
300 + iterative_hash_expr (vro1->op2, vro1->opcode);
303 /* Return the (pre-computed) hashcode for a given reference operation P1. */
306 vn_reference_hash (const void *p1)
308 const vn_reference_t vr1 = (vn_reference_t) p1;
309 return vr1->hashcode;
312 /* Compute a hash for the reference operation VR1 and return it.
   Sums the hashes of all vuses and all operands. */
314 static inline hashval_t
315 vn_reference_compute_hash (const vn_reference_t vr1)
317 hashval_t result = 0;
320 vn_reference_op_t vro;
322 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
323 result += iterative_hash_expr (v, 0);
324 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
325 result += vn_reference_op_compute_hash (vro);
330 /* Return true if reference operations P1 and P2 are equivalent. This
331 means they have the same set of operands and vuses. */
334 vn_reference_eq (const void *p1, const void *p2)
338 vn_reference_op_t vro;
340 const vn_reference_t vr1 = (vn_reference_t) p1;
341 const vn_reference_t vr2 = (vn_reference_t) p2;
/* Fast path: identical vectors are trivially equal. */
343 if (vr1->vuses == vr2->vuses
344 && vr1->operands == vr2->operands)
347 /* Impossible for them to be equivalent if they have different
   numbers of vuses. */
349 if (VEC_length (tree, vr1->vuses) != VEC_length (tree, vr2->vuses))
352 /* We require that address operands be canonicalized in a way that
353 two memory references will have the same operands if they are
   equivalent, so differing operand counts mean inequality. */
355 if (VEC_length (vn_reference_op_s, vr1->operands)
356 != VEC_length (vn_reference_op_s, vr2->operands))
359 /* The memory state is more often different than the address of the
360 store/load, so check it first. Vuses are kept sorted, so a
   pairwise comparison suffices. */
361 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
363 if (VEC_index (tree, vr2->vuses, i) != v)
367 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
369 if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
376 /* Place the vuses from STMT into *RESULT, sorted by SSA name version. */
379 vuses_to_vec (tree stmt, VEC (tree, gc) **result)
387 FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VIRTUAL_USES)
388 VEC_safe_push (tree, gc, *result, vuse);
/* Keep the vector sorted so vn_reference_eq can compare pairwise. */
390 if (VEC_length (tree, *result) > 1)
391 sort_vuses (*result);
395 /* Copy the VUSE names in STMT into a freshly allocated vector, and return
   it.  Caller owns the (GC-allocated) vector. */
399 copy_vuses_from_stmt (tree stmt)
401 VEC (tree, gc) *vuses = NULL;
403 vuses_to_vec (stmt, &vuses);
408 /* Place the vdefs from STMT into *RESULT, sorted by SSA name version. */
411 vdefs_to_vec (tree stmt, VEC (tree, gc) **result)
419 FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, iter, SSA_OP_VIRTUAL_DEFS)
420 VEC_safe_push (tree, gc, *result, vdef);
422 if (VEC_length (tree, *result) > 1)
423 sort_vuses (*result);
426 /* Copy the names of vdef results in STMT into a vector, and return
   it. */
429 static VEC (tree, gc) *
430 copy_vdefs_from_stmt (tree stmt)
432 VEC (tree, gc) *vdefs = NULL;
434 vdefs_to_vec (stmt, &vdefs);
439 /* Place for shared_v{uses/defs}_from_stmt to shove vuses/vdefs. */
440 static VEC (tree, gc) *shared_lookup_vops;
442 /* Copy the virtual uses from STMT into SHARED_LOOKUP_VOPS.
443 This function will overwrite the current SHARED_LOOKUP_VOPS
   contents, so do not hold onto the returned vector across calls. */
447 shared_vuses_from_stmt (tree stmt)
449 VEC_truncate (tree, shared_lookup_vops, 0);
450 vuses_to_vec (stmt, &shared_lookup_vops);
452 return shared_lookup_vops;
455 /* Copy the operations present in load/store/call REF into RESULT, a vector of
456 vn_reference_op_s's. */
459 copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
461 /* Calls are different from all other reference operations. */
462 if (TREE_CODE (ref) == CALL_EXPR)
464 vn_reference_op_s temp;
466 call_expr_arg_iterator iter;
469 /* Copy the call_expr opcode, type, function being called, and
   arguments, each as its own vn_reference_op_s entry. */
471 memset (&temp, 0, sizeof (temp));
472 temp.type = TREE_TYPE (ref);
473 temp.opcode = CALL_EXPR;
474 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
/* Prefer the known fndecl; fall back to the call's function
   expression for indirect calls. */
476 callfn = get_callee_fndecl (ref);
478 callfn = CALL_EXPR_FN (ref);
479 temp.type = TREE_TYPE (callfn);
480 temp.opcode = TREE_CODE (callfn);
482 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
484 FOR_EACH_CALL_EXPR_ARG (callarg, iter, ref)
486 memset (&temp, 0, sizeof (temp));
487 temp.type = TREE_TYPE (callarg);
488 temp.opcode = TREE_CODE (callarg);
490 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
495 /* For non-calls, store the information that makes up the address,
   walking down the reference chain one level per iteration. */
499 vn_reference_op_s temp;
501 memset (&temp, 0, sizeof (temp));
502 temp.type = TREE_TYPE (ref);
503 temp.opcode = TREE_CODE (ref);
507 case ALIGN_INDIRECT_REF:
508 case MISALIGNED_INDIRECT_REF:
510 /* The only operand is the address, which gets its own
511 vn_reference_op_s structure. */
514 /* Record bits and position. */
515 temp.op0 = TREE_OPERAND (ref, 1);
516 temp.op1 = TREE_OPERAND (ref, 2);
519 /* Record field as operand. */
520 temp.op0 = TREE_OPERAND (ref, 1);
522 case ARRAY_RANGE_REF:
524 /* Record index as operand. */
525 temp.op0 = TREE_OPERAND (ref, 1);
526 temp.op1 = TREE_OPERAND (ref, 3);
541 /* These are only interesting for their operands, their
542 existence, and their type. They will never be the last
543 ref in the chain of references (IE they require an
544 operand), so we don't have to put anything
545 for op* as it will be handled by the iteration */
548 case VIEW_CONVERT_EXPR:
555 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
/* Descend into the base object of the reference, if any. */
557 if (REFERENCE_CLASS_P (ref) || TREE_CODE (ref) == ADDR_EXPR)
558 ref = TREE_OPERAND (ref, 0);
564 /* Create a vector of vn_reference_op_s structures from REF, a
565 REFERENCE_CLASS_P tree. The vector is not shared; caller owns it. */
567 static VEC(vn_reference_op_s, heap) *
568 create_reference_ops_from_ref (tree ref)
570 VEC (vn_reference_op_s, heap) *result = NULL;
572 copy_reference_ops_from_ref (ref, &result);
/* Scratch vector reused by shared_reference_ops_from_ref. */
576 static VEC(vn_reference_op_s, heap) *shared_lookup_references;
578 /* Create a vector of vn_reference_op_s structures from REF, a
579 REFERENCE_CLASS_P tree. The vector is shared among all callers of
   this function, so it is overwritten on each call. */
582 static VEC(vn_reference_op_s, heap) *
583 shared_reference_ops_from_ref (tree ref)
587 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
588 copy_reference_ops_from_ref (ref, &shared_lookup_references);
589 return shared_lookup_references;
593 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
594 structures into their value numbers. This is done in-place, and
595 the vector passed in is returned. */
597 static VEC (vn_reference_op_s, heap) *
598 valueize_refs (VEC (vn_reference_op_s, heap) *orig)
600 vn_reference_op_t vro;
603 for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
605 if (vro->opcode == SSA_NAME
606 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
607 vro->op0 = SSA_VAL (vro->op0);
613 /* Transform any SSA_NAME's in ORIG, a vector of vuse trees, into
614 their value numbers. This is done in-place, and the vector passed
   in is returned. */
617 static VEC (tree, gc) *
618 valueize_vuses (VEC (tree, gc) *orig)
620 bool made_replacement = false;
624 for (i = 0; VEC_iterate (tree, orig, i, vuse); i++)
626 if (vuse != SSA_VAL (vuse))
628 made_replacement = true;
629 VEC_replace (tree, orig, i, SSA_VAL (vuse));
/* Replacement may have broken the sort order required by
   vn_reference_eq, so re-sort when anything changed. */
633 if (made_replacement && VEC_length (tree, orig) > 1)
639 /* Lookup OP in the current hash table, and return the resulting
640 value number if it exists in the hash table. Return NULL_TREE if
641 it does not exist in the hash table. VUSES is the memory state
   to look the reference up under. */
644 vn_reference_lookup (tree op, VEC (tree, gc) *vuses)
647 struct vn_reference_s vr1;
/* Canonicalize vuses and operands to their value numbers before
   hashing, so equivalent references hash identically. */
649 vr1.vuses = valueize_vuses (vuses);
650 vr1.operands = valueize_refs (shared_reference_ops_from_ref (op));
651 vr1.hashcode = vn_reference_compute_hash (&vr1);
652 slot = htab_find_slot_with_hash (current_info->references, &vr1, vr1.hashcode,
657 return ((vn_reference_t)*slot)->result;
660 /* Insert OP into the current hash table with a value number of
   RESULT, under memory state VUSES. */
664 vn_reference_insert (tree op, tree result, VEC (tree, gc) *vuses)
669 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
671 vr1->vuses = valueize_vuses (vuses);
672 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
673 vr1->hashcode = vn_reference_compute_hash (vr1);
674 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
676 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
679 /* Because we lookup stores using vuses, and value number failures
680 using the vdefs (see visit_reference_op_store for how and why),
681 it's possible that on failure we may try to insert an already
682 inserted store. This is not wrong, there is no ssa name for a
683 store that we could use as a differentiator anyway. Thus, unlike
684 the other lookup functions, you cannot gcc_assert (!*slot)
   here. */
692 /* Return the stored hashcode for a unary operation. */
695 vn_unary_op_hash (const void *p1)
697 const vn_unary_op_t vuo1 = (vn_unary_op_t) p1;
698 return vuo1->hashcode;
701 /* Hash a unary operation P1 and return the result. */
703 static inline hashval_t
704 vn_unary_op_compute_hash (const vn_unary_op_t vuo1)
706 return iterative_hash_expr (vuo1->op0, vuo1->opcode);
709 /* Return true if P1 and P2, two unary operations, are equivalent. */
712 vn_unary_op_eq (const void *p1, const void *p2)
714 const vn_unary_op_t vuo1 = (vn_unary_op_t) p1;
715 const vn_unary_op_t vuo2 = (vn_unary_op_t) p2;
/* Like vn_reference_op_eq, type equality is pointer identity. */
716 return vuo1->opcode == vuo2->opcode
717 && vuo1->type == vuo2->type
718 && expressions_equal_p (vuo1->op0, vuo2->op0);
721 /* Lookup OP in the current hash table, and return the resulting
722 value number if it exists in the hash table. Return NULL_TREE if
723 it does not exist in the hash table. */
726 vn_unary_op_lookup (tree op)
729 struct vn_unary_op_s vuo1;
731 vuo1.opcode = TREE_CODE (op);
732 vuo1.type = TREE_TYPE (op);
733 vuo1.op0 = TREE_OPERAND (op, 0);
/* Canonicalize the operand to its value number before hashing. */
735 if (TREE_CODE (vuo1.op0) == SSA_NAME)
736 vuo1.op0 = SSA_VAL (vuo1.op0);
738 vuo1.hashcode = vn_unary_op_compute_hash (&vuo1);
739 slot = htab_find_slot_with_hash (current_info->unary, &vuo1, vuo1.hashcode,
743 return ((vn_unary_op_t)*slot)->result;
746 /* Insert OP into the current hash table with a value number of
   RESULT. */
750 vn_unary_op_insert (tree op, tree result)
753 vn_unary_op_t vuo1 = (vn_unary_op_t) pool_alloc (current_info->unary_op_pool);
755 vuo1->opcode = TREE_CODE (op);
756 vuo1->type = TREE_TYPE (op);
757 vuo1->op0 = TREE_OPERAND (op, 0);
758 vuo1->result = result;
760 if (TREE_CODE (vuo1->op0) == SSA_NAME)
761 vuo1->op0 = SSA_VAL (vuo1->op0);
763 vuo1->hashcode = vn_unary_op_compute_hash (vuo1);
764 slot = htab_find_slot_with_hash (current_info->unary, vuo1, vuo1->hashcode,
770 /* Compute and return the hash value for binary operation VBO1. */
772 static inline hashval_t
773 vn_binary_op_compute_hash (const vn_binary_op_t vbo1)
775 return iterative_hash_expr (vbo1->op0, vbo1->opcode)
776 + iterative_hash_expr (vbo1->op1, vbo1->opcode)
779 /* Return the computed hashcode for binary operation P1. */
782 vn_binary_op_hash (const void *p1)
784 const vn_binary_op_t vbo1 = (vn_binary_op_t) p1;
785 return vbo1->hashcode;
788 /* Compare binary operations P1 and P2 and return true if they are
   equivalent. */
792 vn_binary_op_eq (const void *p1, const void *p2)
794 const vn_binary_op_t vbo1 = (vn_binary_op_t) p1;
795 const vn_binary_op_t vbo2 = (vn_binary_op_t) p2;
796 return vbo1->opcode == vbo2->opcode
797 && vbo1->type == vbo2->type
798 && expressions_equal_p (vbo1->op0, vbo2->op0)
799 && expressions_equal_p (vbo1->op1, vbo2->op1);
802 /* Lookup OP in the current hash table, and return the resulting
803 value number if it exists in the hash table. Return NULL_TREE if
804 it does not exist in the hash table. */
807 vn_binary_op_lookup (tree op)
810 struct vn_binary_op_s vbo1;
812 vbo1.opcode = TREE_CODE (op);
813 vbo1.type = TREE_TYPE (op);
814 vbo1.op0 = TREE_OPERAND (op, 0);
815 vbo1.op1 = TREE_OPERAND (op, 1);
/* Canonicalize both operands to their value numbers. */
817 if (TREE_CODE (vbo1.op0) == SSA_NAME)
818 vbo1.op0 = SSA_VAL (vbo1.op0);
819 if (TREE_CODE (vbo1.op1) == SSA_NAME)
820 vbo1.op1 = SSA_VAL (vbo1.op1);
/* Canonicalize commutative operand order so a+b and b+a hash the
   same.  This mirrors the swap done by vn_binary_op_insert. */
822 if (tree_swap_operands_p (vbo1.op0, vbo1.op1, false)
823 && commutative_tree_code (vbo1.opcode))
825 tree temp = vbo1.op0;
830 vbo1.hashcode = vn_binary_op_compute_hash (&vbo1);
831 slot = htab_find_slot_with_hash (current_info->binary, &vbo1, vbo1.hashcode,
835 return ((vn_binary_op_t)*slot)->result;
838 /* Insert OP into the current hash table with a value number of
   RESULT. */
842 vn_binary_op_insert (tree op, tree result)
846 vbo1 = (vn_binary_op_t) pool_alloc (current_info->binary_op_pool);
848 vbo1->opcode = TREE_CODE (op);
849 vbo1->type = TREE_TYPE (op);
850 vbo1->op0 = TREE_OPERAND (op, 0);
851 vbo1->op1 = TREE_OPERAND (op, 1);
852 vbo1->result = result;
854 if (TREE_CODE (vbo1->op0) == SSA_NAME)
855 vbo1->op0 = SSA_VAL (vbo1->op0);
856 if (TREE_CODE (vbo1->op1) == SSA_NAME)
857 vbo1->op1 = SSA_VAL (vbo1->op1);
/* Same commutative canonicalization as in the lookup path. */
859 if (tree_swap_operands_p (vbo1->op0, vbo1->op1, false)
860 && commutative_tree_code (vbo1->opcode))
862 tree temp = vbo1->op0;
863 vbo1->op0 = vbo1->op1;
866 vbo1->hashcode = vn_binary_op_compute_hash (vbo1);
867 slot = htab_find_slot_with_hash (current_info->binary, vbo1, vbo1->hashcode,
874 /* Compute a hashcode for PHI operation VP1 and return it. */
876 static inline hashval_t
877 vn_phi_compute_hash (vn_phi_t vp1)
879 hashval_t result = 0;
/* Seed the hash with the block index -- phis in different blocks are
   never equivalent (see vn_phi_eq). */
883 result = vp1->block->index;
885 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
/* VN_TOP arguments are skipped: TOP is equivalent to anything. */
887 if (phi1op == VN_TOP)
889 result += iterative_hash_expr (phi1op, result);
895 /* Return the computed hashcode for phi operation P1. */
898 vn_phi_hash (const void *p1)
900 const vn_phi_t vp1 = (vn_phi_t) p1;
901 return vp1->hashcode;
904 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
907 vn_phi_eq (const void *p1, const void *p2)
909 const vn_phi_t vp1 = (vn_phi_t) p1;
910 const vn_phi_t vp2 = (vn_phi_t) p2;
912 if (vp1->block == vp2->block)
917 /* Any phi in the same block will have its arguments in the
918 same edge order, because of how we store phi nodes. */
919 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
921 tree phi2op = VEC_index (tree, vp2->phiargs, i);
922 if (phi1op == VN_TOP || phi2op == VN_TOP)
924 if (!expressions_equal_p (phi1op, phi2op))
/* Scratch vector reused by vn_phi_lookup; overwritten each call. */
932 static VEC(tree, heap) *shared_lookup_phiargs;
934 /* Lookup PHI in the current hash table, and return the resulting
935 value number if it exists in the hash table. Return NULL_TREE if
936 it does not exist in the hash table. */
939 vn_phi_lookup (tree phi)
945 VEC_truncate (tree, shared_lookup_phiargs, 0);
947 /* Canonicalize the SSA_NAME's to their value number. */
948 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
950 tree def = PHI_ARG_DEF (phi, i);
951 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
952 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
954 vp1.phiargs = shared_lookup_phiargs;
955 vp1.block = bb_for_stmt (phi);
956 vp1.hashcode = vn_phi_compute_hash (&vp1);
957 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
961 return ((vn_phi_t)*slot)->result;
964 /* Insert PHI into the current hash table with a value number of
   RESULT. */
968 vn_phi_insert (tree phi, tree result)
971 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
/* Unlike lookup, the argument vector is freshly allocated because it
   is stored in the hash table. */
973 VEC (tree, heap) *args = NULL;
975 /* Canonicalize the SSA_NAME's to their value number. */
976 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
978 tree def = PHI_ARG_DEF (phi, i);
979 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
980 VEC_safe_push (tree, heap, args, def);
983 vp1->block = bb_for_stmt (phi);
984 vp1->result = result;
985 vp1->hashcode = vn_phi_compute_hash (vp1);
987 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
990 /* Because we iterate over phi operations more than once, it's
991 possible the slot might already exist here, hence no assert.*/
996 /* Print set of components in strongly connected component SCC to OUT. */
999 print_scc (FILE *out, VEC (tree, heap) *scc)
1004 fprintf (out, "SCC consists of: ");
1005 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1007 print_generic_expr (out, var, 0);
1010 fprintf (out, "\n");
1013 /* Set the value number of FROM to TO, return true if it has changed
   as a result. */
1017 set_ssa_val_to (tree from, tree to)
1020 gcc_assert (to != NULL);
1022 /* The only thing we allow as value numbers are ssa_names and
1023 invariants. So assert that here. */
1024 gcc_assert (TREE_CODE (to) == SSA_NAME || is_gimple_min_invariant (to));
1026 if (dump_file && (dump_flags & TDF_DETAILS))
1028 fprintf (dump_file, "Setting value number of ");
1029 print_generic_expr (dump_file, from, 0);
1030 fprintf (dump_file, " to ");
1031 print_generic_expr (dump_file, to, 0);
1032 fprintf (dump_file, "\n");
1035 currval = SSA_VAL (from);
/* Only record a change when the new value differs from the current
   one (OEP_PURE_SAME treats pure-equal expressions as equal). */
1037 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
1039 SSA_VAL (from) = to;
1045 /* Set all definitions in STMT to value number to themselves.
1046 Return true if a value number changed. */
1049 defs_to_varying (tree stmt)
1051 bool changed = false;
1055 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
1057 tree def = DEF_FROM_PTR (defp);
1059 VN_INFO (def)->use_processed = true;
/* A name valued to itself is "varying" in this lattice. */
1060 changed |= set_ssa_val_to (def, def);
1065 /* Visit a copy between LHS and RHS, return true if the value number
   of LHS changed. */
1069 visit_copy (tree lhs, tree rhs)
1072 /* Follow chains of copies to their destination. */
1073 while (SSA_VAL (rhs) != rhs && TREE_CODE (SSA_VAL (rhs)) == SSA_NAME)
1074 rhs = SSA_VAL (rhs);
1076 /* The copy may have a more interesting constant filled expression
1077 (we don't, since we know our RHS is just an SSA name). */
1078 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
1079 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
1081 return set_ssa_val_to (lhs, rhs);
1084 /* Visit a unary operator RHS, value number it, and return true if the
1085 value number of LHS has changed as a result. */
1088 visit_unary_op (tree lhs, tree op)
1090 bool changed = false;
1091 tree result = vn_unary_op_lookup (op);
/* Hit: LHS gets the previously recorded value number. */
1095 changed = set_ssa_val_to (lhs, result);
/* Miss: LHS becomes its own value number and is recorded. */
1099 changed = set_ssa_val_to (lhs, lhs);
1100 vn_unary_op_insert (op, lhs);
1106 /* Visit a binary operator RHS, value number it, and return true if the
1107 value number of LHS has changed as a result. */
1110 visit_binary_op (tree lhs, tree op)
1112 bool changed = false;
1113 tree result = vn_binary_op_lookup (op);
1117 changed = set_ssa_val_to (lhs, result);
1121 changed = set_ssa_val_to (lhs, lhs);
1122 vn_binary_op_insert (op, lhs);
1128 /* Visit a load from a reference operator RHS, part of STMT, value number it,
1129 and return true if the value number of the LHS has changed as a result. */
1132 visit_reference_op_load (tree lhs, tree op, tree stmt)
1134 bool changed = false;
/* Look the load up under the statement's memory state (vuses). */
1135 tree result = vn_reference_lookup (op, shared_vuses_from_stmt (stmt));
1139 changed = set_ssa_val_to (lhs, result);
1143 changed = set_ssa_val_to (lhs, lhs);
/* Insert with a private copy of the vuses; the shared vector may be
   clobbered by later lookups. */
1144 vn_reference_insert (op, lhs, copy_vuses_from_stmt (stmt));
1151 /* Visit a store to a reference operator LHS, part of STMT, value number it,
1152 and return true if the value number of the LHS has changed as a result. */
1155 visit_reference_op_store (tree lhs, tree op, tree stmt)
1157 bool changed = false;
1159 bool resultsame = false;
1161 /* First we want to lookup using the *vuses* from the store and see
1162 if the last store to this location with the same address had the
   same value.
1165 The vuses represent the memory state before the store. If the
1166 memory state, address, and value of the store is the same as the
1167 last store to this location, then this store will produce the
1168 same memory state as that store.
1170 In this case the vdef versions for this store are value numbered to those
1171 vuse versions, since they represent the same memory state after
   the store.
1174 Otherwise, the vdefs for the store are used when inserting into
1175 the table, since the store generates a new memory state. */
1177 result = vn_reference_lookup (lhs, shared_vuses_from_stmt (stmt));
1181 if (TREE_CODE (result) == SSA_NAME)
1182 result = SSA_VAL (result);
1183 resultsame = expressions_equal_p (result, op);
1186 if (!result || !resultsame)
1188 VEC(tree, gc) *vdefs = copy_vdefs_from_stmt (stmt);
1192 if (dump_file && (dump_flags & TDF_DETAILS))
1194 fprintf (dump_file, "No store match\n");
1195 fprintf (dump_file, "Value numbering store ");
1196 print_generic_expr (dump_file, lhs, 0);
1197 fprintf (dump_file, " to ");
1198 print_generic_expr (dump_file, op, 0);
1199 fprintf (dump_file, "\n");
1201 /* Have to set value numbers before insert, since insert is
1202 going to valueize the references in-place. */
1203 for (i = 0; VEC_iterate (tree, vdefs, i, vdef); i++)
1205 VN_INFO (vdef)->use_processed = true;
1206 changed |= set_ssa_val_to (vdef, vdef);
/* Record the store keyed by its vdefs (the post-store state). */
1209 vn_reference_insert (lhs, op, vdefs);
1213 /* We had a match, so value number the vdefs to have the value
1214 number of the vuses they came from. */
1215 ssa_op_iter op_iter;
1219 if (dump_file && (dump_flags & TDF_DETAILS))
1220 fprintf (dump_file, "Store matched earlier value,"
1221 "value numbering store vdefs to matching vuses.\n");
1223 FOR_EACH_SSA_VDEF_OPERAND (var, vv, stmt, op_iter)
1225 tree def = DEF_FROM_PTR (var);
1228 /* Uh, if the vuse is a multiuse, we can't really do much
1229 here, sadly, since we don't know which value number of
1230 which vuse to use. */
1231 if (VUSE_VECT_NUM_ELEM (*vv) != 1)
1234 use = VUSE_ELEMENT_VAR (*vv, 0);
1236 VN_INFO (def)->use_processed = true;
1237 changed |= set_ssa_val_to (def, SSA_VAL (use));
1244 /* Visit and value number PHI, return true if the value number
   of the PHI result changed. */
1248 visit_phi (tree phi)
1250 bool changed = false;
1252 tree sameval = VN_TOP;
1253 bool allsame = true;
1256 /* See if all non-TOP arguments have the same value. TOP is
1257 equivalent to everything, so we can ignore it. */
1258 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
1260 tree def = PHI_ARG_DEF (phi, i);
1262 if (TREE_CODE (def) == SSA_NAME)
1263 def = SSA_VAL (def);
/* First non-TOP argument seeds the comparison value. */
1266 if (sameval == VN_TOP)
1272 if (!expressions_equal_p (def, sameval))
1280 /* If all value numbered to the same value, the phi node has that
   value. */
1284 if (is_gimple_min_invariant (sameval))
1286 VN_INFO (PHI_RESULT (phi))->has_constants = true;
1287 VN_INFO (PHI_RESULT (phi))->expr = sameval;
1291 VN_INFO (PHI_RESULT (phi))->has_constants = false;
1292 VN_INFO (PHI_RESULT (phi))->expr = sameval;
1295 if (TREE_CODE (sameval) == SSA_NAME)
1296 return visit_copy (PHI_RESULT (phi), sameval);
1298 return set_ssa_val_to (PHI_RESULT (phi), sameval);
1301 /* Otherwise, see if it is equivalent to a phi node in this block. */
1302 result = vn_phi_lookup (phi);
1305 if (TREE_CODE (result) == SSA_NAME)
1306 changed = visit_copy (PHI_RESULT (phi), result);
1308 changed = set_ssa_val_to (PHI_RESULT (phi), result);
/* No equivalent phi: the result becomes its own value number. */
1312 vn_phi_insert (phi, PHI_RESULT (phi));
1313 VN_INFO (PHI_RESULT (phi))->has_constants = false;
1314 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
1315 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
1321 /* Return true if EXPR contains constants, dispatching on the
   tree-code class of EXPR. */
1324 expr_has_constants (tree expr)
1326 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
/* Unary: constant iff its single operand is invariant. */
1329 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
/* Binary: constant iff either operand is invariant. */
1332 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
1333 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
1334 /* Constants inside reference ops are rarely interesting, but
1335 it can take a lot of looking to find them. */
1337 case tcc_declaration:
1340 return is_gimple_min_invariant (expr);
1345 /* Replace SSA_NAMES in expr with their value numbers, and return the
1347 result.  This is performed in place. */
1350 valueize_expr (tree expr)
1352 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
1355 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
1356 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
1357 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
/* Binary case: valueize both operands, skipping VN_TOP values. */
1360 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
1361 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
1362 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
1363 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
1364 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
1365 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
1373 /* Simplify the binary expression RHS, and return the result if
/* NOTE(review): the extract drops the return-type line, braces, and the
   trailing return statements (presumably `return result;` on success and
   `return NULL_TREE;` otherwise — confirm against the full file).  */
1377 simplify_binary_expression (tree rhs)
1379 tree result = NULL_TREE;
1380 tree op0 = TREE_OPERAND (rhs, 0);
1381 tree op1 = TREE_OPERAND (rhs, 1);
1383 /* This will not catch every single case we could combine, but will
1384 catch those with constants. The goal here is to simultaneously
1385 combine constants between expressions, but avoid infinite
1386 expansion of expressions during simplification. */
/* For each SSA operand: prefer its recorded constant-bearing expression
   (valueized in place), else substitute its value number when one has
   been computed (not VN_TOP) and it is not just itself.  */
1387 if (TREE_CODE (op0) == SSA_NAME)
1389 if (VN_INFO (op0)->has_constants)
1390 op0 = valueize_expr (VN_INFO (op0)->expr);
1391 else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
1392 op0 = VN_INFO (op0)->valnum;
1395 if (TREE_CODE (op1) == SSA_NAME)
1397 if (VN_INFO (op1)->has_constants)
1398 op1 = valueize_expr (VN_INFO (op1)->expr);
1399 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
1400 op1 = VN_INFO (op1)->valnum;
/* Let fold do the actual combining on the substituted operands.  */
1403 result = fold_binary (TREE_CODE (rhs), TREE_TYPE (rhs), op0, op1);
1405 /* Make sure result is not a complex expression consisting
1406 of operators of operators (IE (a + b) + (a + c))
1407 Otherwise, we will end up with unbounded expressions if
1408 fold does anything at all. */
1409 if (result && valid_gimple_expression_p (result))
1415 /* Try to simplify RHS using equivalences and constant folding. */
/* NOTE(review): the extract drops the return-type line, braces, several
   case labels (tcc_reference / tcc_unary / tcc_binary appear to be among
   them), breaks, and the final return.  */
1418 try_to_simplify (tree stmt, tree rhs)
/* An SSA name simplifies directly to its constant value number, or to
   its recorded constant-bearing expression.  */
1420 if (TREE_CODE (rhs) == SSA_NAME)
1422 if (is_gimple_min_invariant (SSA_VAL (rhs)))
1423 return SSA_VAL (rhs);
1424 else if (VN_INFO (rhs)->has_constants)
1425 return VN_INFO (rhs)->expr;
1429 switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
1431 /* For references, see if we find a result for the lookup,
1432 and use it if we do. */
1433 case tcc_declaration:
1434 /* Pull out any truly constant values. */
/* A read-only static decl with a constant initializer folds to that
   initializer.  */
1435 if (TREE_READONLY (rhs)
1436 && TREE_STATIC (rhs)
1437 && DECL_INITIAL (rhs)
1438 && is_gimple_min_invariant (DECL_INITIAL (rhs)))
1439 return DECL_INITIAL (rhs);
/* Reference case (label missing in extract): look the reference up in
   the current value table, keyed by the statement's virtual uses.  */
1444 tree result = vn_reference_lookup (rhs,
1445 shared_vuses_from_stmt (stmt));
1450 /* We could do a little more with unary ops, if they expand
1451 into binary ops, but it's debatable whether it is worth it. */
/* Unary case (label missing in extract): substitute the operand's
   constant expression or value number, then try fold_unary.  */
1454 tree result = NULL_TREE;
1455 tree op0 = TREE_OPERAND (rhs, 0);
1456 if (TREE_CODE (op0) == SSA_NAME && VN_INFO (op0)->has_constants)
1457 op0 = VN_INFO (op0)->expr;
1458 else if (TREE_CODE (op0) == SSA_NAME && SSA_VAL (op0) != op0)
1459 op0 = SSA_VAL (op0);
1460 result = fold_unary (TREE_CODE (rhs), TREE_TYPE (rhs), op0);
/* Comparisons are handled like binary operations.  */
1465 case tcc_comparison:
1467 return simplify_binary_expression (rhs);
1476 /* Visit and value number USE, return true if the value number
/* Visit one SSA name: value number its defining statement, dispatching
   on statement kind (empty def, PHI, non-assignment, or assignment with
   the various RHS classes).  NOTE(review): the extract drops the return
   type, braces, several local declarations (`ann`, `simplified`), case
   labels, breaks, and the final `return changed;` — line numbers show
   the gaps.  */
1480 visit_use (tree use)
1482 bool changed = false;
1483 tree stmt = SSA_NAME_DEF_STMT (use);
1486 VN_INFO (use)->use_processed = true;
1488 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
1489 if (dump_file && (dump_flags & TDF_DETAILS))
1491 fprintf (dump_file, "Value numbering ");
1492 print_generic_expr (dump_file, use, 0);
1493 fprintf (dump_file, " stmt = ");
1494 print_generic_stmt (dump_file, stmt, 0);
1497 /* RETURN_EXPR may have an embedded MODIFY_STMT. */
1498 if (TREE_CODE (stmt) == RETURN_EXPR
1499 && TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT)
1500 stmt = TREE_OPERAND (stmt, 0);
1502 ann = stmt_ann (stmt);
1504 /* Handle uninitialized uses. */
/* An empty def statement means an uninitialized/default SSA name: it is
   its own value.  */
1505 if (IS_EMPTY_STMT (stmt))
1507 changed = set_ssa_val_to (use, use);
1511 if (TREE_CODE (stmt) == PHI_NODE)
1513 changed = visit_phi (stmt);
/* Anything that is not a plain assignment, or has volatile operands,
   pessimizes all its defs to VARYING.  */
1515 else if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
1516 || (ann && ann->has_volatile_ops))
1518 changed = defs_to_varying (stmt);
1522 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
1523 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
1526 STRIP_USELESS_TYPE_CONVERSION (rhs);
1528 /* Shortcut for copies. Simplifying copies is pointless,
1529 since we copy the expression and value they represent. */
1530 if (TREE_CODE (rhs) == SSA_NAME && TREE_CODE (lhs) == SSA_NAME)
1532 changed = visit_copy (lhs, rhs);
1535 simplified = try_to_simplify (stmt, rhs);
1536 if (simplified && simplified != rhs)
1538 if (dump_file && (dump_flags & TDF_DETAILS))
1540 fprintf (dump_file, "RHS ");
1541 print_generic_expr (dump_file, rhs, 0);
1542 fprintf (dump_file, " simplified to ");
1543 print_generic_expr (dump_file, simplified, 0);
1544 if (TREE_CODE (lhs) == SSA_NAME)
1545 fprintf (dump_file, " has constants %d\n",
1546 VN_INFO (lhs)->has_constants);
1548 fprintf (dump_file, "\n");
1552 /* Setting value numbers to constants will occasionally
1553 screw up phi congruence because constants are not
1554 uniquely associated with a single ssa name that can be
/* Case 1: simplified to a constant — record it as the lhs's value.  */
1556 if (simplified && is_gimple_min_invariant (simplified)
1557 && TREE_CODE (lhs) == SSA_NAME
1558 && simplified != rhs)
1560 VN_INFO (lhs)->expr = simplified;
1561 VN_INFO (lhs)->has_constants = true;
1562 changed = set_ssa_val_to (lhs, simplified);
/* Case 2: simplified to another SSA name — treat as a copy.  */
1565 else if (simplified && TREE_CODE (simplified) == SSA_NAME
1566 && TREE_CODE (lhs) == SSA_NAME)
1568 changed = visit_copy (lhs, simplified);
/* Case 3: simplified to some other expression — cache it (unshared so
   later valueizing cannot mutate the IL).  */
1571 else if (simplified)
1573 if (TREE_CODE (lhs) == SSA_NAME)
1575 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
1576 /* We have to unshare the expression or else
1577 valuizing may change the IL stream. */
1578 VN_INFO (lhs)->expr = unshare_expr (simplified);
/* Case 4: no simplification, but the raw RHS carries constants.  */
1582 else if (expr_has_constants (rhs) && TREE_CODE (lhs) == SSA_NAME)
1584 VN_INFO (lhs)->has_constants = true;
1585 VN_INFO (lhs)->expr = unshare_expr (rhs);
1587 else if (TREE_CODE (lhs) == SSA_NAME)
1589 /* We reset expr and constantness here because we may
1590 have been value numbering optimistically, and
1591 iterating. They may become non-constant in this case,
1592 even if they were optimistically constant. */
1594 VN_INFO (lhs)->has_constants = false;
1595 VN_INFO (lhs)->expr = lhs;
/* Now set the actual value number, dispatching on the lhs/rhs kind.
   Abnormal-PHI lhs names must stay VARYING (coalescing constraints).  */
1598 if (TREE_CODE (lhs) == SSA_NAME
1599 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
1600 changed = defs_to_varying (stmt);
1601 else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
1603 changed = visit_reference_op_store (lhs, rhs, stmt);
1605 else if (TREE_CODE (lhs) == SSA_NAME)
1607 if (is_gimple_min_invariant (rhs))
1609 VN_INFO (lhs)->has_constants = true;
1610 VN_INFO (lhs)->expr = rhs;
1611 changed = set_ssa_val_to (lhs, rhs);
1615 switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
/* tcc_unary case label missing in extract.  */
1618 changed = visit_unary_op (lhs, rhs);
/* tcc_binary/tcc_comparison case labels missing in extract.  */
1621 changed = visit_binary_op (lhs, rhs);
1623 /* If tcc_vl_expr ever encompasses more than
1624 CALL_EXPR, this will need to be changed. */
/* Pure/const calls are value numbered like reference loads; anything
   with side effects goes VARYING.  */
1626 if (call_expr_flags (rhs) & (ECF_PURE | ECF_CONST))
1627 changed = visit_reference_op_load (lhs, rhs, stmt);
1629 changed = defs_to_varying (stmt);
1631 case tcc_declaration:
1633 changed = visit_reference_op_load (lhs, rhs, stmt);
1635 case tcc_expression:
1636 if (TREE_CODE (rhs) == ADDR_EXPR)
1638 changed = visit_unary_op (lhs, rhs);
1643 changed = defs_to_varying (stmt);
/* Fallback for any lhs kind not handled above.  */
1649 changed = defs_to_varying (stmt);
1656 /* Compare two operands by reverse postorder index */
/* qsort comparator for SSA names: orders by the RPO number of the
   defining statement's block; within a block, PHIs come first, then
   statement uid order.  NOTE(review): the extract drops the return
   type, braces, several `return 0/-1/1;` lines and the `bba == bbb`
   same-block test implied by lines 1687-1693.  */
1659 compare_ops (const void *pa, const void *pb)
1661 const tree opa = *((const tree *)pa);
1662 const tree opb = *((const tree *)pb);
1663 tree opstmta = SSA_NAME_DEF_STMT (opa);
1664 tree opstmtb = SSA_NAME_DEF_STMT (opb);
/* Empty (nop) def statements sort before everything else.  */
1668 if (IS_EMPTY_STMT (opstmta) && IS_EMPTY_STMT (opstmtb))
1670 else if (IS_EMPTY_STMT (opstmta))
1672 else if (IS_EMPTY_STMT (opstmtb))
1675 bba = bb_for_stmt (opstmta);
1676 bbb = bb_for_stmt (opstmtb);
/* Same-block tie-break: PHI nodes first, then source order by uid.  */
1687 if (TREE_CODE (opstmta) == PHI_NODE && TREE_CODE (opstmtb) == PHI_NODE)
1689 else if (TREE_CODE (opstmta) == PHI_NODE)
1691 else if (TREE_CODE (opstmtb) == PHI_NODE)
1693 return stmt_ann (opstmta)->uid - stmt_ann (opstmtb)->uid;
/* Different blocks: compare by reverse postorder number.  */
1695 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
1698 /* Sort an array containing members of a strongly connected component
1699 SCC so that the members are ordered by RPO number.
1700 This means that when the sort is complete, iterating through the
1701 array will give you the members in RPO order. */
/* NOTE(review): the extract drops the return-type line, braces, and the
   qsort element-size/comparator arguments (sizeof (tree), compare_ops).  */
1704 sort_scc (VEC (tree, heap) *scc)
1706 qsort (VEC_address (tree, scc),
1707 VEC_length (tree, scc),
1715 process_scc (VEC (tree, heap) *scc)
1717 /* If the SCC has a single member, just visit it. */
1719 if (VEC_length (tree, scc) == 1)
1721 tree use = VEC_index (tree, scc, 0);
1722 if (!VN_INFO (use)->use_processed)
1729 unsigned int iterations = 0;
1730 bool changed = true;
1732 /* Iterate over the SCC with the optimistic table until it stops
1734 current_info = optimistic_info;
1739 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1740 changed |= visit_use (var);
1743 if (dump_file && (dump_flags & TDF_STATS))
1744 fprintf (dump_file, "Processing SCC required %d iterations\n",
1747 /* Finally, visit the SCC once using the valid table. */
1748 current_info = valid_info;
1749 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1754 /* Depth first search on NAME to discover and process SCC's in the SSA
1756 Execution of this algorithm relies on the fact that the SCC's are
1757 popped off the stack in topological order. */
/* Tarjan-style SCC discovery on the SSA use-def graph (dfsnum/low/
   on_sccstack bookkeeping is visible below).  NOTE(review): the
   extract drops the function signature and local declarations (lines
   1758-1766), the recursive DFS call for unvisited uses, and the calls
   to sort_scc/process_scc for multi-member SCCs.  */
1767 VN_INFO (name)->dfsnum = next_dfs_num++;
1768 VN_INFO (name)->visited = true;
1769 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
1771 VEC_safe_push (tree, heap, sccstack, name);
1772 VN_INFO (name)->on_sccstack = true;
1773 defstmt = SSA_NAME_DEF_STMT (name);
1775 /* Recursively DFS on our operands, looking for SCC's. */
1776 if (!IS_EMPTY_STMT (defstmt))
1778 FOR_EACH_PHI_OR_STMT_USE (usep, SSA_NAME_DEF_STMT (name), iter,
1781 tree use = USE_FROM_PTR (usep);
1783 /* Since we handle phi nodes, we will sometimes get
1784 invariants in the use expression. */
1785 if (TREE_CODE (use) != SSA_NAME)
/* Unvisited use: recurse (call dropped in extract), then fold the
   child's low link into ours.  */
1788 if (! (VN_INFO (use)->visited))
1791 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
1792 VN_INFO (use)->low);
/* Back edge to a node still on the SCC stack: update low link.  */
1794 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
1795 && VN_INFO (use)->on_sccstack)
1797 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
1798 VN_INFO (name)->low);
1803 /* See if we found an SCC. */
/* NAME is an SCC root when its low link equals its own dfs number.  */
1804 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
1806 VEC (tree, heap) *scc = NULL;
1809 /* Found an SCC, pop the components off the SCC stack and
/* Pop stack entries down to and including NAME itself.  */
1813 x = VEC_pop (tree, sccstack);
1815 VN_INFO (x)->on_sccstack = false;
1816 VEC_safe_push (tree, heap, scc, x);
1817 } while (x != name);
1819 if (VEC_length (tree, scc) > 1)
1822 if (dump_file && (dump_flags & TDF_DETAILS))
1823 print_scc (dump_file, scc);
1827 VEC_free (tree, heap, scc);
/* NOTE(review): line 1835 is the tail of free_phi (a hashtable
   free-function releasing a phi entry's argument vector); its header
   lines are missing from this extract.  */
1835 VEC_free (tree, heap, phi->phiargs);
1839 /* Free a reference operation structure VP. */
/* htab_delete callback for the references table: releases the operand
   vector of a vn_reference_s entry.  NOTE(review): the return-type line
   and braces are missing from the extract.  */
1842 free_reference (void *vp)
1844 vn_reference_t vr = vp;
1845 VEC_free (vn_reference_op_s, heap, vr->operands);
1848 /* Allocate a value number table. */
/* Creates the four hashtables (phis, unary, binary, references) and the
   matching alloc pools for TABLE.  NOTE(review): the extract drops the
   return-type line, braces, the references table's free function
   argument, and each pool's chunk-count argument.  free_vn_table below
   must release everything allocated here, in matching order.  */
1851 allocate_vn_table (vn_tables_t table)
1853 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
1854 table->unary = htab_create (23, vn_unary_op_hash, vn_unary_op_eq, NULL);
1855 table->binary = htab_create (23, vn_binary_op_hash, vn_binary_op_eq, NULL);
1856 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
1859 table->unary_op_pool = create_alloc_pool ("VN unary operations",
1860 sizeof (struct vn_unary_op_s),
1862 table->binary_op_pool = create_alloc_pool ("VN binary operations",
1863 sizeof (struct vn_binary_op_s),
1865 table->phis_pool = create_alloc_pool ("VN phis",
1866 sizeof (struct vn_phi_s),
1868 table->references_pool = create_alloc_pool ("VN references",
1869 sizeof (struct vn_reference_s),
1873 /* Free a value number table. */
/* Mirror of allocate_vn_table: deletes the four hashtables (their
   registered free functions run per-entry) and the four alloc pools.
   NOTE(review): the return-type line and braces are missing from the
   extract.  */
1876 free_vn_table (vn_tables_t table)
1878 htab_delete (table->phis);
1879 htab_delete (table->unary);
1880 htab_delete (table->binary);
1881 htab_delete (table->references);
1882 free_alloc_pool (table->unary_op_pool);
1883 free_alloc_pool (table->binary_op_pool);
1884 free_alloc_pool (table->phis_pool);
1885 free_alloc_pool (table->references_pool);
/* NOTE(review): the function header (lines before 1893, presumably
   `static void init_scc_vn (void)` with other local declarations) is
   missing from this extract; what follows is the initialization body:
   dominance info, the per-SSA-name aux table, RPO numbering of blocks,
   the VN_TOP sentinel, per-name VN_INFO setup, statement uids, and the
   valid/optimistic tables.  */
1893 int *rpo_numbers_temp;
1897 calculate_dominance_info (CDI_DOMINATORS);
1901 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
1902 /* VEC_alloc doesn't actually grow it to the right size, it just
1903 preallocates the space to do so. */
1904 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
1905 shared_lookup_phiargs = NULL;
1906 shared_lookup_vops = NULL;
1907 shared_lookup_references = NULL;
1908 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
1909 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
1910 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
1912 /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
1913 the i'th block in RPO order is bb. We want to map bb's to RPO
1914 numbers, so we need to rearrange this array. */
/* Invert the mapping: rpo_numbers[bb->index] = position of bb in RPO.  */
1915 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
1916 rpo_numbers[rpo_numbers_temp[j]] = j;
1918 free (rpo_numbers_temp);
/* VN_TOP: a raw temporary used as the "unknown/top" lattice value.  */
1920 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
1922 /* Create the VN_INFO structures, and initialize value numbers to
/* Every name starts at VN_TOP with itself as its expression.
   NOTE(review): a null-name guard between 1926 and 1929 appears to be
   among the dropped lines — confirm against the full file.  */
1924 for (i = 0; i < num_ssa_names; i++)
1926 tree name = ssa_name (i);
1929 VN_INFO_GET (name)->valnum = VN_TOP;
1930 VN_INFO (name)->expr = name;
/* Assign monotonically increasing uids to statements so compare_ops
   can order same-block statements.  NOTE(review): the enclosing
   FOR_EACH_BB loop header is among the dropped lines.  */
1936 block_stmt_iterator bsi;
1937 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1939 tree stmt = bsi_stmt (bsi);
1940 stmt_ann (stmt)->uid = id++;
1944 /* Create the valid and optimistic value numbering tables. */
1945 valid_info = XCNEW (struct vn_tables_s);
1946 allocate_vn_table (valid_info);
1947 optimistic_info = XCNEW (struct vn_tables_s);
1948 allocate_vn_table (optimistic_info);
/* Allocate a fresh PRE value table and make it the current one.
   NOTE(review): the return-type line (and preceding comment) and braces
   are missing from this extract.  */
1953 switch_to_PRE_table (void)
1955 pre_info = XCNEW (struct vn_tables_s);
1956 allocate_vn_table (pre_info);
1957 current_info = pre_info;
/* Teardown counterpart of init_scc_vn: frees the shared lookup
   vectors, RPO numbers, per-name VN_INFO structures, the aux table, the
   SCC stack, and the valid/optimistic (and, conditionally, PRE) value
   tables.  NOTE(review): the function header lines are missing from
   this extract, as are a null-name guard in the loop and the `if
   (pre_info)` condition implied by line 1989.  */
1965 VEC_free (tree, heap, shared_lookup_phiargs);
1966 VEC_free (tree, gc, shared_lookup_vops);
1967 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
1968 XDELETEVEC (rpo_numbers);
1969 for (i = 0; i < num_ssa_names; i++)
1971 tree name = ssa_name (i);
1974 XDELETE (VN_INFO (name));
/* Clear any stale VALUE_HANDLE left in SSA_NAME_VALUE by PRE.  */
1975 if (SSA_NAME_VALUE (name) &&
1976 TREE_CODE (SSA_NAME_VALUE (name)) == VALUE_HANDLE)
1977 SSA_NAME_VALUE (name) = NULL;
1981 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
1982 VEC_free (tree, heap, sccstack);
1983 free_vn_table (valid_info);
1984 XDELETE (valid_info);
1985 free_vn_table (optimistic_info);
1986 XDELETE (optimistic_info);
1989 free_vn_table (pre_info);
2001 current_info = valid_info;
2003 for (param = DECL_ARGUMENTS (current_function_decl);
2005 param = TREE_CHAIN (param))
2007 if (gimple_default_def (cfun, param) != NULL)
2009 tree def = gimple_default_def (cfun, param);
2010 SSA_VAL (def) = def;
2014 for (i = num_ssa_names - 1; i > 0; i--)
2016 tree name = ssa_name (i);
2018 && VN_INFO (name)->visited == false
2019 && !has_zero_uses (name))
2023 if (dump_file && (dump_flags & TDF_DETAILS))
2025 fprintf (dump_file, "Value numbers:\n");
2026 for (i = 0; i < num_ssa_names; i++)
2028 tree name = ssa_name (i);
2029 if (name && VN_INFO (name)->visited
2030 && (SSA_VAL (name) != name
2031 || is_gimple_min_invariant (VN_INFO (name)->expr)))
2033 print_generic_expr (dump_file, name, 0);
2034 fprintf (dump_file, " = ");
2035 if (is_gimple_min_invariant (VN_INFO (name)->expr))
2036 print_generic_expr (dump_file, VN_INFO (name)->expr, 0);
2038 print_generic_expr (dump_file, SSA_VAL (name), 0);
2039 fprintf (dump_file, "\n");