1 /* SCC value numbering for trees
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dan@dberlin.org>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to
20 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
21 Boston, MA 02110-1301, USA. */
25 #include "coretypes.h"
29 #include "basic-block.h"
30 #include "diagnostic.h"
31 #include "tree-inline.h"
32 #include "tree-flow.h"
33 #include "tree-gimple.h"
34 #include "tree-dump.h"
38 #include "tree-iterator.h"
40 #include "alloc-pool.h"
41 #include "tree-pass.h"
44 #include "langhooks.h"
46 #include "tree-ssa-propagate.h"
47 #include "tree-ssa-sccvn.h"
49 /* This algorithm is based on the SCC algorithm presented by Keith
50 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
51 (http://citeseer.ist.psu.edu/41805.html). In
52 straight line code, it is equivalent to a regular hash based value
53 numbering that is performed in reverse postorder.
55 For code with cycles, there are two alternatives, both of which
56 require keeping the hashtables separate from the actual list of
57 value numbers for SSA names.
59 1. Iterate value numbering in an RPO walk of the blocks, removing
60 all the entries from the hashtable after each iteration (but
61 keeping the SSA name->value number mapping between iterations).
62 Iterate until it does not change.
64 2. Perform value numbering as part of an SCC walk on the SSA graph,
65 iterating only the cycles in the SSA graph until they do not change
66 (using a separate, optimistic hashtable for value numbering the SCC
69 The second is not just faster in practice (because most SSA graph
70 cycles do not involve all the variables in the graph), it also has
73 One of these nice properties is that when we pop an SCC off the
74 stack, we are guaranteed to have processed all the operands coming from
75 *outside of that SCC*, so we do not need to do anything special to
76 ensure they have value numbers.
78 Another nice property is that the SCC walk is done as part of a DFS
79 of the SSA graph, which makes it easy to perform combining and
80 simplifying operations at the same time.
82 The code below is deliberately written in a way that makes it easy
83 to separate the SCC walk from the other work it does.
85 In order to propagate constants through the code, we track which
86 expressions contain constants, and use those while folding. In
87 theory, we could also track expressions whose value numbers are
88 replaced, in case we end up folding based on expression
91 In order to value number memory, we assign value numbers to vuses.
92 This enables us to note that, for example, stores to the same
address of the same value from the same starting memory states are
equivalent.

TODO:
98 I have not seen an SCC big enough for this to be a win.
99 2. If you differentiate between phi nodes for loops and phi nodes
100 for if-then-else, you can properly consider phi nodes in different
101 blocks for equivalence.
102 3. We could value number vuses in more cases, particularly, whole
106 /* The set of hashtables and alloc_pool's for their items. */
108 typedef struct vn_tables_s
114 alloc_pool unary_op_pool;
115 alloc_pool binary_op_pool;
116 alloc_pool phis_pool;
117 alloc_pool references_pool;
120 /* Binary operations in the hashtable consist of two operands, an
121 opcode, and a type. Result is the value number of the operation,
122 and hashcode is stored to avoid having to calculate it
125 typedef struct vn_binary_op_s
127 enum tree_code opcode;
135 /* Unary operations in the hashtable consist of a single operand, an
136 opcode, and a type. Result is the value number of the operation,
137 and hashcode is stored to avoid having to calculate it repeatedly. */
139 typedef struct vn_unary_op_s
141 enum tree_code opcode;
148 /* Phi nodes in the hashtable consist of their non-VN_TOP phi
149 arguments, and the basic block the phi is in. Result is the value
150 number of the operation, and hashcode is stored to avoid having to
151 calculate it repeatedly. Phi nodes not in the same block are never
152 considered equivalent. */
154 typedef struct vn_phi_s
156 VEC (tree, heap) *phiargs;
162 /* Reference operands only exist in reference operations structures.
163 They consist of an opcode, type, and some number of operands. For
164 a given opcode, some, all, or none of the operands may be used.
165 The operands are there to store the information that makes up the
166 portion of the addressing calculation that opcode performs. */
168 typedef struct vn_reference_op_struct
170 enum tree_code opcode;
176 typedef vn_reference_op_s *vn_reference_op_t;
178 DEF_VEC_O(vn_reference_op_s);
179 DEF_VEC_ALLOC_O(vn_reference_op_s, heap);
181 /* A reference operation in the hashtable is representation as a
182 collection of vuses, representing the memory state at the time of
183 the operation, and a collection of operands that make up the
184 addressing calculation. If two vn_reference_t's have the same set
185 of operands, they access the same memory location. We also store
186 the resulting value number, and the hashcode. The vuses are
187 always stored in order sorted by ssa name version. */
189 typedef struct vn_reference_s
191 VEC (tree, gc) *vuses;
192 VEC (vn_reference_op_s, heap) *operands;
197 /* Valid hashtables storing information we have proven to be
200 static vn_tables_t valid_info;
202 /* Optimistic hashtables storing information we are making assumptions about
203 during iterations. */
205 static vn_tables_t optimistic_info;
207 /* PRE hashtables storing information about mapping from expressions to
210 static vn_tables_t pre_info;
212 /* Pointer to the set of hashtables that is currently being used.
213 Should always point to either the optimistic_info, or the
216 static vn_tables_t current_info;
219 /* Reverse post order index for each basic block. */
221 static int *rpo_numbers;
223 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
225 /* This represents the top of the VN lattice, which is the universal
230 /* Next DFS number and the stack for strongly connected component
233 static unsigned int next_dfs_num;
234 static VEC (tree, heap) *sccstack;
236 DEF_VEC_P(vn_ssa_aux_t);
237 DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);
239 /* Table of vn_ssa_aux_t's, one per ssa_name. */
241 static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
243 /* Return the value numbering information for a given SSA name. */
248 return VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
249 SSA_NAME_VERSION (name));
252 /* Set the value numbering info for a given SSA name to a given
256 VN_INFO_SET (tree name, vn_ssa_aux_t value)
258 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
259 SSA_NAME_VERSION (name), value);
262 /* Get the value numbering info for a given SSA name, creating it if
263 it does not exist. */
266 VN_INFO_GET (tree name)
268 vn_ssa_aux_t newinfo = XCNEW (struct vn_ssa_aux);
269 if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
270 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
271 SSA_NAME_VERSION (name) + 1);
272 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
273 SSA_NAME_VERSION (name), newinfo);
278 /* Compare two reference operands P1 and P2 for equality. return true if
279 they are equal, and false otherwise. */
282 vn_reference_op_eq (const void *p1, const void *p2)
284 const vn_reference_op_t vro1 = (vn_reference_op_t) p1;
285 const vn_reference_op_t vro2 = (vn_reference_op_t) p2;
286 return vro1->opcode == vro2->opcode
287 && vro1->type == vro2->type
288 && expressions_equal_p (vro1->op0, vro2->op0)
289 && expressions_equal_p (vro1->op1, vro2->op1)
290 && expressions_equal_p (vro1->op2, vro2->op2);
293 /* Compute the hash for a reference operand VRO1 */
296 vn_reference_op_compute_hash (const vn_reference_op_t vro1)
298 return iterative_hash_expr (vro1->op0, vro1->opcode)
299 + iterative_hash_expr (vro1->op1, vro1->opcode)
300 + iterative_hash_expr (vro1->op2, vro1->opcode);
303 /* Return the hashcode for a given reference operation P1. */
306 vn_reference_hash (const void *p1)
308 const vn_reference_t vr1 = (vn_reference_t) p1;
309 return vr1->hashcode;
312 /* Compute a hash for the reference operation VR1 and return it. */
314 static inline hashval_t
315 vn_reference_compute_hash (const vn_reference_t vr1)
317 hashval_t result = 0;
320 vn_reference_op_t vro;
322 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
323 result += iterative_hash_expr (v, 0);
324 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
325 result += vn_reference_op_compute_hash (vro);
330 /* Return true if reference operations P1 and P2 are equivalent. This
331 means they have the same set of operands and vuses. */
334 vn_reference_eq (const void *p1, const void *p2)
338 vn_reference_op_t vro;
340 const vn_reference_t vr1 = (vn_reference_t) p1;
341 const vn_reference_t vr2 = (vn_reference_t) p2;
343 if (vr1->vuses == vr2->vuses
344 && vr1->operands == vr2->operands)
347 /* Impossible for them to be equivalent if they have different
349 if (VEC_length (tree, vr1->vuses) != VEC_length (tree, vr2->vuses))
352 /* We require that address operands be canonicalized in a way that
353 two memory references will have the same operands if they are
355 if (VEC_length (vn_reference_op_s, vr1->operands)
356 != VEC_length (vn_reference_op_s, vr2->operands))
359 /* The memory state is more often different than the address of the
360 store/load, so check it first. */
361 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
363 if (VEC_index (tree, vr2->vuses, i) != v)
367 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
369 if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
376 /* Place the vuses from STMT into *result */
379 vuses_to_vec (tree stmt, VEC (tree, gc) **result)
387 FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VIRTUAL_USES)
388 VEC_safe_push (tree, gc, *result, vuse);
390 if (VEC_length (tree, *result) > 1)
391 sort_vuses (*result);
395 /* Copy the VUSE names in STMT into a vector, and return
399 copy_vuses_from_stmt (tree stmt)
401 VEC (tree, gc) *vuses = NULL;
403 vuses_to_vec (stmt, &vuses);
408 /* Place the vdefs from STMT into *result */
411 vdefs_to_vec (tree stmt, VEC (tree, gc) **result)
419 FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, iter, SSA_OP_VIRTUAL_DEFS)
420 VEC_safe_push (tree, gc, *result, vdef);
422 if (VEC_length (tree, *result) > 1)
423 sort_vuses (*result);
426 /* Copy the names of vdef results in STMT into a vector, and return
429 static VEC (tree, gc) *
430 copy_vdefs_from_stmt (tree stmt)
432 VEC (tree, gc) *vdefs = NULL;
434 vdefs_to_vec (stmt, &vdefs);
439 /* Place for shared_v{uses/defs}_from_stmt to shove vuses/vdefs. */
440 static VEC (tree, gc) *shared_lookup_vops;
442 /* Copy the virtual uses from STMT into SHARED_LOOKUP_VOPS.
443 This function will overwrite the current SHARED_LOOKUP_VOPS
447 shared_vuses_from_stmt (tree stmt)
449 VEC_truncate (tree, shared_lookup_vops, 0);
450 vuses_to_vec (stmt, &shared_lookup_vops);
452 return shared_lookup_vops;
455 /* Copy the operations present in load/store/call REF into RESULT, a vector of
456 vn_reference_op_s's. */
459 copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
461 /* Calls are different from all other reference operations. */
462 if (TREE_CODE (ref) == CALL_EXPR)
464 vn_reference_op_s temp;
466 call_expr_arg_iterator iter;
469 /* Copy the call_expr opcode, type, function being called, and
471 memset (&temp, 0, sizeof (temp));
472 temp.type = TREE_TYPE (ref);
473 temp.opcode = CALL_EXPR;
474 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
476 callfn = get_callee_fndecl (ref);
478 callfn = CALL_EXPR_FN (ref);
479 temp.type = TREE_TYPE (callfn);
480 temp.opcode = TREE_CODE (callfn);
482 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
484 FOR_EACH_CALL_EXPR_ARG (callarg, iter, ref)
486 memset (&temp, 0, sizeof (temp));
487 temp.type = TREE_TYPE (callarg);
488 temp.opcode = TREE_CODE (callarg);
490 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
495 /* For non-calls, store the information that makes up the address. */
499 vn_reference_op_s temp;
501 memset (&temp, 0, sizeof (temp));
502 temp.type = TREE_TYPE (ref);
503 temp.opcode = TREE_CODE (ref);
507 case ALIGN_INDIRECT_REF:
508 case MISALIGNED_INDIRECT_REF:
510 /* The only operand is the address, which gets its own
511 vn_reference_op_s structure. */
514 /* Record bits and position. */
515 temp.op0 = TREE_OPERAND (ref, 1);
516 temp.op1 = TREE_OPERAND (ref, 2);
519 /* Record field as operand. */
520 temp.op0 = TREE_OPERAND (ref, 1);
522 case ARRAY_RANGE_REF:
524 /* Record index as operand. */
525 temp.op0 = TREE_OPERAND (ref, 1);
526 temp.op1 = TREE_OPERAND (ref, 3);
541 /* These are only interesting for their operands, their
542 existence, and their type. They will never be the last
543 ref in the chain of references (IE they require an
544 operand), so we don't have to put anything
545 for op* as it will be handled by the iteration */
548 case VIEW_CONVERT_EXPR:
555 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
557 if (REFERENCE_CLASS_P (ref) || TREE_CODE (ref) == ADDR_EXPR)
558 ref = TREE_OPERAND (ref, 0);
564 /* Create a vector of vn_reference_op_s structures from REF, a
565 REFERENCE_CLASS_P tree. The vector is not shared. */
567 static VEC(vn_reference_op_s, heap) *
568 create_reference_ops_from_ref (tree ref)
570 VEC (vn_reference_op_s, heap) *result = NULL;
572 copy_reference_ops_from_ref (ref, &result);
576 static VEC(vn_reference_op_s, heap) *shared_lookup_references;
578 /* Create a vector of vn_reference_op_s structures from REF, a
579 REFERENCE_CLASS_P tree. The vector is shared among all callers of
582 static VEC(vn_reference_op_s, heap) *
583 shared_reference_ops_from_ref (tree ref)
587 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
588 copy_reference_ops_from_ref (ref, &shared_lookup_references);
589 return shared_lookup_references;
593 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
594 structures into their value numbers. This is done in-place, and
595 the vector passed in is returned. */
597 static VEC (vn_reference_op_s, heap) *
598 valueize_refs (VEC (vn_reference_op_s, heap) *orig)
600 vn_reference_op_t vro;
603 for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
605 if (vro->opcode == SSA_NAME
606 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
607 vro->op0 = SSA_VAL (vro->op0);
613 /* Transform any SSA_NAME's in ORIG, a vector of vuse trees, into
614 their value numbers. This is done in-place, and the vector passed
617 static VEC (tree, gc) *
618 valueize_vuses (VEC (tree, gc) *orig)
620 bool made_replacement = false;
624 for (i = 0; VEC_iterate (tree, orig, i, vuse); i++)
626 if (vuse != SSA_VAL (vuse))
628 made_replacement = true;
629 VEC_replace (tree, orig, i, SSA_VAL (vuse));
633 if (made_replacement && VEC_length (tree, orig) > 1)
639 /* Lookup OP in the current hash table, and return the resulting
640 value number if it exists in the hash table. Return NULL_TREE if
641 it does not exist in the hash table. */
644 vn_reference_lookup (tree op, VEC (tree, gc) *vuses)
647 struct vn_reference_s vr1;
649 vr1.vuses = valueize_vuses (vuses);
650 vr1.operands = valueize_refs (shared_reference_ops_from_ref (op));
651 vr1.hashcode = vn_reference_compute_hash (&vr1);
652 slot = htab_find_slot_with_hash (current_info->references, &vr1, vr1.hashcode,
657 return ((vn_reference_t)*slot)->result;
660 /* Insert OP into the current hash table with a value number of
664 vn_reference_insert (tree op, tree result, VEC (tree, gc) *vuses)
669 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
671 vr1->vuses = valueize_vuses (vuses);
672 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
673 vr1->hashcode = vn_reference_compute_hash (vr1);
674 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
676 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
679 /* Because we lookup stores using vuses, and value number failures
680 using the vdefs (see visit_reference_op_store for how and why),
681 it's possible that on failure we may try to insert an already
682 inserted store. This is not wrong, there is no ssa name for a
683 store that we could use as a differentiator anyway. Thus, unlike
684 the other lookup functions, you cannot gcc_assert (!*slot)
692 /* Return the stored hashcode for a unary operation. */
695 vn_unary_op_hash (const void *p1)
697 const vn_unary_op_t vuo1 = (vn_unary_op_t) p1;
698 return vuo1->hashcode;
701 /* Hash a unary operation P1 and return the result. */
703 static inline hashval_t
704 vn_unary_op_compute_hash (const vn_unary_op_t vuo1)
706 return iterative_hash_expr (vuo1->op0, vuo1->opcode);
709 /* Return true if P1 and P2, two unary operations, are equivalent. */
712 vn_unary_op_eq (const void *p1, const void *p2)
714 const vn_unary_op_t vuo1 = (vn_unary_op_t) p1;
715 const vn_unary_op_t vuo2 = (vn_unary_op_t) p2;
716 return vuo1->opcode == vuo2->opcode
717 && vuo1->type == vuo2->type
718 && expressions_equal_p (vuo1->op0, vuo2->op0);
721 /* Lookup OP in the current hash table, and return the resulting
722 value number if it exists in the hash table. Return NULL_TREE if
723 it does not exist in the hash table. */
726 vn_unary_op_lookup (tree op)
729 struct vn_unary_op_s vuo1;
731 vuo1.opcode = TREE_CODE (op);
732 vuo1.type = TREE_TYPE (op);
733 vuo1.op0 = TREE_OPERAND (op, 0);
735 if (TREE_CODE (vuo1.op0) == SSA_NAME)
736 vuo1.op0 = SSA_VAL (vuo1.op0);
738 vuo1.hashcode = vn_unary_op_compute_hash (&vuo1);
739 slot = htab_find_slot_with_hash (current_info->unary, &vuo1, vuo1.hashcode,
743 return ((vn_unary_op_t)*slot)->result;
746 /* Insert OP into the current hash table with a value number of
750 vn_unary_op_insert (tree op, tree result)
753 vn_unary_op_t vuo1 = (vn_unary_op_t) pool_alloc (current_info->unary_op_pool);
755 vuo1->opcode = TREE_CODE (op);
756 vuo1->type = TREE_TYPE (op);
757 vuo1->op0 = TREE_OPERAND (op, 0);
758 vuo1->result = result;
760 if (TREE_CODE (vuo1->op0) == SSA_NAME)
761 vuo1->op0 = SSA_VAL (vuo1->op0);
763 vuo1->hashcode = vn_unary_op_compute_hash (vuo1);
764 slot = htab_find_slot_with_hash (current_info->unary, vuo1, vuo1->hashcode,
770 /* Compute and return the hash value for binary operation VBO1. */
772 static inline hashval_t
773 vn_binary_op_compute_hash (const vn_binary_op_t vbo1)
775 return iterative_hash_expr (vbo1->op0, vbo1->opcode)
776 + iterative_hash_expr (vbo1->op1, vbo1->opcode);
779 /* Return the computed hashcode for binary operation P1. */
782 vn_binary_op_hash (const void *p1)
784 const vn_binary_op_t vbo1 = (vn_binary_op_t) p1;
785 return vbo1->hashcode;
788 /* Compare binary operations P1 and P2 and return true if they are
792 vn_binary_op_eq (const void *p1, const void *p2)
794 const vn_binary_op_t vbo1 = (vn_binary_op_t) p1;
795 const vn_binary_op_t vbo2 = (vn_binary_op_t) p2;
796 return vbo1->opcode == vbo2->opcode
797 && vbo1->type == vbo2->type
798 && expressions_equal_p (vbo1->op0, vbo2->op0)
799 && expressions_equal_p (vbo1->op1, vbo2->op1);
802 /* Lookup OP in the current hash table, and return the resulting
803 value number if it exists in the hash table. Return NULL_TREE if
804 it does not exist in the hash table. */
807 vn_binary_op_lookup (tree op)
810 struct vn_binary_op_s vbo1;
812 vbo1.opcode = TREE_CODE (op);
813 vbo1.type = TREE_TYPE (op);
814 vbo1.op0 = TREE_OPERAND (op, 0);
815 vbo1.op1 = TREE_OPERAND (op, 1);
817 if (TREE_CODE (vbo1.op0) == SSA_NAME)
818 vbo1.op0 = SSA_VAL (vbo1.op0);
819 if (TREE_CODE (vbo1.op1) == SSA_NAME)
820 vbo1.op1 = SSA_VAL (vbo1.op1);
822 if (tree_swap_operands_p (vbo1.op0, vbo1.op1, false)
823 && commutative_tree_code (vbo1.opcode))
825 tree temp = vbo1.op0;
830 vbo1.hashcode = vn_binary_op_compute_hash (&vbo1);
831 slot = htab_find_slot_with_hash (current_info->binary, &vbo1, vbo1.hashcode,
835 return ((vn_binary_op_t)*slot)->result;
838 /* Insert OP into the current hash table with a value number of
842 vn_binary_op_insert (tree op, tree result)
846 vbo1 = (vn_binary_op_t) pool_alloc (current_info->binary_op_pool);
848 vbo1->opcode = TREE_CODE (op);
849 vbo1->type = TREE_TYPE (op);
850 vbo1->op0 = TREE_OPERAND (op, 0);
851 vbo1->op1 = TREE_OPERAND (op, 1);
852 vbo1->result = result;
854 if (TREE_CODE (vbo1->op0) == SSA_NAME)
855 vbo1->op0 = SSA_VAL (vbo1->op0);
856 if (TREE_CODE (vbo1->op1) == SSA_NAME)
857 vbo1->op1 = SSA_VAL (vbo1->op1);
859 if (tree_swap_operands_p (vbo1->op0, vbo1->op1, false)
860 && commutative_tree_code (vbo1->opcode))
862 tree temp = vbo1->op0;
863 vbo1->op0 = vbo1->op1;
866 vbo1->hashcode = vn_binary_op_compute_hash (vbo1);
867 slot = htab_find_slot_with_hash (current_info->binary, vbo1, vbo1->hashcode,
874 /* Compute a hashcode for PHI operation VP1 and return it. */
876 static inline hashval_t
877 vn_phi_compute_hash (vn_phi_t vp1)
879 hashval_t result = 0;
883 result = vp1->block->index;
885 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
887 if (phi1op == VN_TOP)
889 result += iterative_hash_expr (phi1op, result);
895 /* Return the computed hashcode for phi operation P1. */
898 vn_phi_hash (const void *p1)
900 const vn_phi_t vp1 = (vn_phi_t) p1;
901 return vp1->hashcode;
904 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
907 vn_phi_eq (const void *p1, const void *p2)
909 const vn_phi_t vp1 = (vn_phi_t) p1;
910 const vn_phi_t vp2 = (vn_phi_t) p2;
912 if (vp1->block == vp2->block)
917 /* Any phi in the same block will have it's arguments in the
918 same edge order, because of how we store phi nodes. */
919 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
921 tree phi2op = VEC_index (tree, vp2->phiargs, i);
922 if (phi1op == VN_TOP || phi2op == VN_TOP)
924 if (!expressions_equal_p (phi1op, phi2op))
932 static VEC(tree, heap) *shared_lookup_phiargs;
934 /* Lookup PHI in the current hash table, and return the resulting
935 value number if it exists in the hash table. Return NULL_TREE if
936 it does not exist in the hash table. */
939 vn_phi_lookup (tree phi)
945 VEC_truncate (tree, shared_lookup_phiargs, 0);
947 /* Canonicalize the SSA_NAME's to their value number. */
948 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
950 tree def = PHI_ARG_DEF (phi, i);
951 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
952 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
954 vp1.phiargs = shared_lookup_phiargs;
955 vp1.block = bb_for_stmt (phi);
956 vp1.hashcode = vn_phi_compute_hash (&vp1);
957 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
961 return ((vn_phi_t)*slot)->result;
964 /* Insert PHI into the current hash table with a value number of
968 vn_phi_insert (tree phi, tree result)
971 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
973 VEC (tree, heap) *args = NULL;
975 /* Canonicalize the SSA_NAME's to their value number. */
976 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
978 tree def = PHI_ARG_DEF (phi, i);
979 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
980 VEC_safe_push (tree, heap, args, def);
983 vp1->block = bb_for_stmt (phi);
984 vp1->result = result;
985 vp1->hashcode = vn_phi_compute_hash (vp1);
987 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
990 /* Because we iterate over phi operations more than once, it's
991 possible the slot might already exist here, hence no assert.*/
996 /* Print set of components in strongly connected component SCC to OUT. */
999 print_scc (FILE *out, VEC (tree, heap) *scc)
1004 fprintf (out, "SCC consists of: ");
1005 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1007 print_generic_expr (out, var, 0);
1010 fprintf (out, "\n");
1013 /* Set the value number of FROM to TO, return true if it has changed
1017 set_ssa_val_to (tree from, tree to)
1021 /* The only thing we allow as value numbers are VN_TOP, ssa_names
1022 and invariants. So assert that here. */
1023 gcc_assert (to != NULL_TREE
1025 || TREE_CODE (to) == SSA_NAME
1026 || is_gimple_min_invariant (to)));
1028 if (dump_file && (dump_flags & TDF_DETAILS))
1030 fprintf (dump_file, "Setting value number of ");
1031 print_generic_expr (dump_file, from, 0);
1032 fprintf (dump_file, " to ");
1033 print_generic_expr (dump_file, to, 0);
1034 fprintf (dump_file, "\n");
1037 currval = SSA_VAL (from);
1039 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
1041 SSA_VAL (from) = to;
1047 /* Set all definitions in STMT to value number to themselves.
1048 Return true if a value number changed. */
1051 defs_to_varying (tree stmt)
1053 bool changed = false;
1057 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
1059 tree def = DEF_FROM_PTR (defp);
1061 VN_INFO (def)->use_processed = true;
1062 changed |= set_ssa_val_to (def, def);
1067 /* Visit a copy between LHS and RHS, return true if the value number
1071 visit_copy (tree lhs, tree rhs)
1074 /* Follow chains of copies to their destination. */
1075 while (SSA_VAL (rhs) != rhs && TREE_CODE (SSA_VAL (rhs)) == SSA_NAME)
1076 rhs = SSA_VAL (rhs);
1078 /* The copy may have a more interesting constant filled expression
1079 (we don't, since we know our RHS is just an SSA name). */
1080 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
1081 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
1083 return set_ssa_val_to (lhs, rhs);
1086 /* Visit a unary operator RHS, value number it, and return true if the
1087 value number of LHS has changed as a result. */
1090 visit_unary_op (tree lhs, tree op)
1092 bool changed = false;
1093 tree result = vn_unary_op_lookup (op);
1097 changed = set_ssa_val_to (lhs, result);
1101 changed = set_ssa_val_to (lhs, lhs);
1102 vn_unary_op_insert (op, lhs);
1108 /* Visit a binary operator RHS, value number it, and return true if the
1109 value number of LHS has changed as a result. */
1112 visit_binary_op (tree lhs, tree op)
1114 bool changed = false;
1115 tree result = vn_binary_op_lookup (op);
1119 changed = set_ssa_val_to (lhs, result);
1123 changed = set_ssa_val_to (lhs, lhs);
1124 vn_binary_op_insert (op, lhs);
1130 /* Visit a load from a reference operator RHS, part of STMT, value number it,
1131 and return true if the value number of the LHS has changed as a result. */
1134 visit_reference_op_load (tree lhs, tree op, tree stmt)
1136 bool changed = false;
1137 tree result = vn_reference_lookup (op, shared_vuses_from_stmt (stmt));
1141 changed = set_ssa_val_to (lhs, result);
1145 changed = set_ssa_val_to (lhs, lhs);
1146 vn_reference_insert (op, lhs, copy_vuses_from_stmt (stmt));
1153 /* Visit a store to a reference operator LHS, part of STMT, value number it,
1154 and return true if the value number of the LHS has changed as a result. */
1157 visit_reference_op_store (tree lhs, tree op, tree stmt)
1159 bool changed = false;
1161 bool resultsame = false;
1163 /* First we want to lookup using the *vuses* from the store and see
1164 if there the last store to this location with the same address
1167 The vuses represent the memory state before the store. If the
1168 memory state, address, and value of the store is the same as the
1169 last store to this location, then this store will produce the
1170 same memory state as that store.
1172 In this case the vdef versions for this store are value numbered to those
1173 vuse versions, since they represent the same memory state after
1176 Otherwise, the vdefs for the store are used when inserting into
1177 the table, since the store generates a new memory state. */
1179 result = vn_reference_lookup (lhs, shared_vuses_from_stmt (stmt));
1183 if (TREE_CODE (result) == SSA_NAME)
1184 result = SSA_VAL (result);
1185 resultsame = expressions_equal_p (result, op);
1188 if (!result || !resultsame)
1190 VEC(tree, gc) *vdefs = copy_vdefs_from_stmt (stmt);
1194 if (dump_file && (dump_flags & TDF_DETAILS))
1196 fprintf (dump_file, "No store match\n");
1197 fprintf (dump_file, "Value numbering store ");
1198 print_generic_expr (dump_file, lhs, 0);
1199 fprintf (dump_file, " to ");
1200 print_generic_expr (dump_file, op, 0);
1201 fprintf (dump_file, "\n");
1203 /* Have to set value numbers before insert, since insert is
1204 going to valueize the references in-place. */
1205 for (i = 0; VEC_iterate (tree, vdefs, i, vdef); i++)
1207 VN_INFO (vdef)->use_processed = true;
1208 changed |= set_ssa_val_to (vdef, vdef);
1211 vn_reference_insert (lhs, op, vdefs);
1215 /* We had a match, so value number the vdefs to have the value
1216 number of the vuses they came from. */
1217 ssa_op_iter op_iter;
1221 if (dump_file && (dump_flags & TDF_DETAILS))
1222 fprintf (dump_file, "Store matched earlier value,"
1223 "value numbering store vdefs to matching vuses.\n");
1225 FOR_EACH_SSA_VDEF_OPERAND (var, vv, stmt, op_iter)
1227 tree def = DEF_FROM_PTR (var);
1230 /* Uh, if the vuse is a multiuse, we can't really do much
1231 here, sadly, since we don't know which value number of
1232 which vuse to use. */
1233 if (VUSE_VECT_NUM_ELEM (*vv) != 1)
1236 use = VUSE_ELEMENT_VAR (*vv, 0);
1238 VN_INFO (def)->use_processed = true;
1239 changed |= set_ssa_val_to (def, SSA_VAL (use));
1246 /* Visit and value number PHI, return true if the value number
1250 visit_phi (tree phi)
1252 bool changed = false;
1254 tree sameval = VN_TOP;
1255 bool allsame = true;
1258 /* See if all non-TOP arguments have the same value. TOP is
1259 equivalent to everything, so we can ignore it. */
1260 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
1262 tree def = PHI_ARG_DEF (phi, i);
1264 if (TREE_CODE (def) == SSA_NAME)
1265 def = SSA_VAL (def);
1268 if (sameval == VN_TOP)
1274 if (!expressions_equal_p (def, sameval))
1282 /* If all value numbered to the same value, the phi node has that
1286 if (is_gimple_min_invariant (sameval))
1288 VN_INFO (PHI_RESULT (phi))->has_constants = true;
1289 VN_INFO (PHI_RESULT (phi))->expr = sameval;
1293 VN_INFO (PHI_RESULT (phi))->has_constants = false;
1294 VN_INFO (PHI_RESULT (phi))->expr = sameval;
1297 if (TREE_CODE (sameval) == SSA_NAME)
1298 return visit_copy (PHI_RESULT (phi), sameval);
1300 return set_ssa_val_to (PHI_RESULT (phi), sameval);
1303 /* Otherwise, see if it is equivalent to a phi node in this block. */
1304 result = vn_phi_lookup (phi);
1307 if (TREE_CODE (result) == SSA_NAME)
1308 changed = visit_copy (PHI_RESULT (phi), result);
1310 changed = set_ssa_val_to (PHI_RESULT (phi), result);
1314 vn_phi_insert (phi, PHI_RESULT (phi));
1315 VN_INFO (PHI_RESULT (phi))->has_constants = false;
1316 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
1317 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
1323 /* Return true if EXPR contains constants. */
1326 expr_has_constants (tree expr)
1328 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
1331 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
1334 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
1335 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
1336 /* Constants inside reference ops are rarely interesting, but
1337 it can take a lot of looking to find them. */
1339 case tcc_declaration:
1342 return is_gimple_min_invariant (expr);
1347 /* Replace SSA_NAMES in expr with their value numbers, and return the
1349 This is performed in place. */
1352 valueize_expr (tree expr)
1354 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
/* Unary case: substitute the value number for operand 0, but never
   substitute VN_TOP (the "not yet known" lattice value) into the
   expression.  */
1357 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
1358 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
1359 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
/* Binary case: valueize both operands independently, with the same
   VN_TOP guard on each.  */
1362 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
1363 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
1364 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
1365 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
1366 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
1367 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
1375 /* Simplify the binary expression RHS, and return the result if
1379 simplify_binary_expression (tree rhs)
1381 tree result = NULL_TREE;
1382 tree op0 = TREE_OPERAND (rhs, 0);
1383 tree op1 = TREE_OPERAND (rhs, 1);
1385 /* This will not catch every single case we could combine, but will
1386 catch those with constants. The goal here is to simultaneously
1387 combine constants between expressions, but avoid infinite
1388 expansion of expressions during simplification. */
/* For each SSA operand: if its known expression contains constants,
   substitute the valueized expression so fold can combine them;
   otherwise substitute its value number (skipping VN_TOP and
   self-valued names, where substitution would be a no-op).  */
1389 if (TREE_CODE (op0) == SSA_NAME)
1391 if (VN_INFO (op0)->has_constants)
1392 op0 = valueize_expr (VN_INFO (op0)->expr);
1393 else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
1394 op0 = SSA_VAL (op0);
1397 if (TREE_CODE (op1) == SSA_NAME)
1399 if (VN_INFO (op1)->has_constants)
1400 op1 = valueize_expr (VN_INFO (op1)->expr);
1401 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
1402 op1 = SSA_VAL (op1);
/* Let fold do the actual combining on the substituted operands.  */
1405 result = fold_binary (TREE_CODE (rhs), TREE_TYPE (rhs), op0, op1);
1407 /* Make sure result is not a complex expression consisting
1408 of operators of operators (IE (a + b) + (a + c))
1409 Otherwise, we will end up with unbounded expressions if
1410 fold does anything at all. */
1411 if (result && valid_gimple_expression_p (result))
1417 /* Try to simplify RHS using equivalences and constant folding. */
1420 try_to_simplify (tree stmt, tree rhs)
/* An SSA name on the RHS simplifies directly to its constant value
   number, or to its recorded constant-bearing expression.  */
1422 if (TREE_CODE (rhs) == SSA_NAME)
1424 if (is_gimple_min_invariant (SSA_VAL (rhs)))
1425 return SSA_VAL (rhs);
1426 else if (VN_INFO (rhs)->has_constants)
1427 return VN_INFO (rhs)->expr;
1431 switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
1433 /* For references, see if we find a result for the lookup,
1434 and use it if we do. */
1435 case tcc_declaration:
1436 /* Pull out any truly constant values. */
1437 if (TREE_READONLY (rhs)
1438 && TREE_STATIC (rhs)
1439 && DECL_INITIAL (rhs)
1440 && valid_gimple_expression_p (DECL_INITIAL (rhs))
1441 return DECL_INITIAL (rhs);
/* Reference: look up a previously computed value for the same
   reference under the statement's virtual uses.  */
1446 tree result = vn_reference_lookup (rhs,
1447 shared_vuses_from_stmt (stmt));
1452 /* We could do a little more with unary ops, if they expand
1453 into binary ops, but it's debatable whether it is worth it. */
1456 tree result = NULL_TREE;
1457 tree op0 = TREE_OPERAND (rhs, 0);
/* Substitute a constant-bearing expression or value number for the
   operand before folding, mirroring the binary case.  */
1458 if (TREE_CODE (op0) == SSA_NAME && VN_INFO (op0)->has_constants)
1459 op0 = VN_INFO (op0)->expr;
1460 else if (TREE_CODE (op0) == SSA_NAME && SSA_VAL (op0) != op0)
1461 op0 = SSA_VAL (op0);
1462 result = fold_unary (TREE_CODE (rhs), TREE_TYPE (rhs), op0);
1467 case tcc_comparison:
1469 return simplify_binary_expression (rhs);
1478 /* Visit and value number USE, return true if the value number
1482 visit_use (tree use)
1484 bool changed = false;
1485 tree stmt = SSA_NAME_DEF_STMT (use);
/* Mark USE so the SCC driver will not visit it again this pass.  */
1488 VN_INFO (use)->use_processed = true;
1490 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
1491 if (dump_file && (dump_flags & TDF_DETAILS))
1493 fprintf (dump_file, "Value numbering ");
1494 print_generic_expr (dump_file, use, 0);
1495 fprintf (dump_file, " stmt = ");
1496 print_generic_stmt (dump_file, stmt, 0);
1499 /* RETURN_EXPR may have an embedded MODIFY_STMT. */
1500 if (TREE_CODE (stmt) == RETURN_EXPR
1501 && TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT)
1502 stmt = TREE_OPERAND (stmt, 0);
1504 ann = stmt_ann (stmt);
1506 /* Handle uninitialized uses. */
/* An empty defining statement means USE is a default definition;
   value number it to itself.  */
1507 if (IS_EMPTY_STMT (stmt))
1509 changed = set_ssa_val_to (use, use);
1513 if (TREE_CODE (stmt) == PHI_NODE)
1515 changed = visit_phi (stmt);
/* Anything that is not a plain assignment, or that has volatile
   operands, cannot be value numbered meaningfully.  */
1517 else if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
1518 || (ann && ann->has_volatile_ops))
1520 changed = defs_to_varying (stmt);
1524 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
1525 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
1528 STRIP_USELESS_TYPE_CONVERSION (rhs);
1530 /* Shortcut for copies. Simplifying copies is pointless,
1531 since we copy the expression and value they represent. */
1532 if (TREE_CODE (rhs) == SSA_NAME && TREE_CODE (lhs) == SSA_NAME)
1534 changed = visit_copy (lhs, rhs);
1537 simplified = try_to_simplify (stmt, rhs);
1538 if (simplified && simplified != rhs)
1540 if (dump_file && (dump_flags & TDF_DETAILS))
1542 fprintf (dump_file, "RHS ");
1543 print_generic_expr (dump_file, rhs, 0);
1544 fprintf (dump_file, " simplified to ");
1545 print_generic_expr (dump_file, simplified, 0);
1546 if (TREE_CODE (lhs) == SSA_NAME)
1547 fprintf (dump_file, " has constants %d\n",
1548 VN_INFO (lhs)->has_constants);
1550 fprintf (dump_file, "\n");
1554 /* Setting value numbers to constants will occasionally
1555 screw up phi congruence because constants are not
1556 uniquely associated with a single ssa name that can be
/* Simplified to an invariant: record it as LHS's expression and
   value number.  */
1558 if (simplified && is_gimple_min_invariant (simplified)
1559 && TREE_CODE (lhs) == SSA_NAME
1560 && simplified != rhs)
1562 VN_INFO (lhs)->expr = simplified;
1563 VN_INFO (lhs)->has_constants = true;
1564 changed = set_ssa_val_to (lhs, simplified);
/* Simplified to another SSA name: treat as a copy.  */
1567 else if (simplified && TREE_CODE (simplified) == SSA_NAME
1568 && TREE_CODE (lhs) == SSA_NAME)
1570 changed = visit_copy (lhs, simplified);
/* Simplified to some other expression: remember it (unshared) so
   later valueization cannot corrupt the IL.  */
1573 else if (simplified)
1575 if (TREE_CODE (lhs) == SSA_NAME)
1577 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
1578 /* We have to unshare the expression or else
1579 valuizing may change the IL stream. */
1580 VN_INFO (lhs)->expr = unshare_expr (simplified);
/* No simplification, but the original RHS carries constants.  */
1584 else if (expr_has_constants (rhs) && TREE_CODE (lhs) == SSA_NAME)
1586 VN_INFO (lhs)->has_constants = true;
1587 VN_INFO (lhs)->expr = unshare_expr (rhs);
1589 else if (TREE_CODE (lhs) == SSA_NAME)
1591 /* We reset expr and constantness here because we may
1592 have been value numbering optimistically, and
1593 iterating. They may become non-constant in this case,
1594 even if they were optimistically constant. */
1596 VN_INFO (lhs)->has_constants = false;
1597 VN_INFO (lhs)->expr = lhs;
/* Names in abnormal phis must not be equated with anything else.  */
1600 if (TREE_CODE (lhs) == SSA_NAME
1601 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
1602 changed = defs_to_varying (stmt);
1603 else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
1605 changed = visit_reference_op_store (lhs, rhs, stmt);
1607 else if (TREE_CODE (lhs) == SSA_NAME)
1609 if (is_gimple_min_invariant (rhs))
1611 VN_INFO (lhs)->has_constants = true;
1612 VN_INFO (lhs)->expr = rhs;
1613 changed = set_ssa_val_to (lhs, rhs);
/* Dispatch on the shape of the RHS: unary/binary ops get their own
   visitors, pure/const calls and reference reads are treated as
   loads, everything else becomes varying.  */
1617 switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
1620 changed = visit_unary_op (lhs, rhs);
1623 changed = visit_binary_op (lhs, rhs);
1625 /* If tcc_vl_expr ever encompasses more than
1626 CALL_EXPR, this will need to be changed. */
1628 if (call_expr_flags (rhs) & (ECF_PURE | ECF_CONST))
1629 changed = visit_reference_op_load (lhs, rhs, stmt);
1631 changed = defs_to_varying (stmt);
1633 case tcc_declaration:
1635 changed = visit_reference_op_load (lhs, rhs, stmt);
1637 case tcc_expression:
/* ADDR_EXPR is the one tcc_expression handled like a unary op.  */
1638 if (TREE_CODE (rhs) == ADDR_EXPR)
1640 changed = visit_unary_op (lhs, rhs);
1645 changed = defs_to_varying (stmt);
1651 changed = defs_to_varying (stmt);
1658 /* Compare two operands by reverse postorder index */
/* qsort comparator for SSA names: orders by the RPO number of the
   defining statement's block, with ties broken so that empty
   (default) definitions sort first, then phis, then statement uid.  */
1661 compare_ops (const void *pa, const void *pb)
1663 const tree opa = *((const tree *)pa);
1664 const tree opb = *((const tree *)pb);
1665 tree opstmta = SSA_NAME_DEF_STMT (opa);
1666 tree opstmtb = SSA_NAME_DEF_STMT (opb);
/* Default definitions have no block; sort them ahead of real defs.  */
1670 if (IS_EMPTY_STMT (opstmta) && IS_EMPTY_STMT (opstmtb))
1672 else if (IS_EMPTY_STMT (opstmta))
1674 else if (IS_EMPTY_STMT (opstmtb))
1677 bba = bb_for_stmt (opstmta);
1678 bbb = bb_for_stmt (opstmtb);
/* Same block: phis precede ordinary statements, which are ordered
   by their pre-assigned uid.  */
1689 if (TREE_CODE (opstmta) == PHI_NODE && TREE_CODE (opstmtb) == PHI_NODE)
1691 else if (TREE_CODE (opstmta) == PHI_NODE)
1693 else if (TREE_CODE (opstmtb) == PHI_NODE)
1695 return stmt_ann (opstmta)->uid - stmt_ann (opstmtb)->uid;
/* Different blocks: order by reverse postorder number.  */
1697 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
1700 /* Sort an array containing members of a strongly connected component
1701 SCC so that the members are ordered by RPO number.
1702 This means that when the sort is complete, iterating through the
1703 array will give you the members in RPO order. */
1706 sort_scc (VEC (tree, heap) *scc)
/* Sort the VEC's backing array in place using compare_ops.  */
1708 qsort (VEC_address (tree, scc),
1709 VEC_length (tree, scc),
1714 /* Process a strongly connected component in the SSA graph. */
1717 process_scc (VEC (tree, heap) *scc)
1719 /* If the SCC has a single member, just visit it. */
1721 if (VEC_length (tree, scc) == 1)
1723 tree use = VEC_index (tree, scc, 0)
1724 if (!VN_INFO (use)->use_processed)
/* Multi-member SCC: iterate to a fixed point.  */
1731 unsigned int iterations = 0;
1732 bool changed = true;
1734 /* Iterate over the SCC with the optimistic table until it stops
1736 current_info = optimistic_info;
1741 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1742 changed |= visit_use (var);
1745 if (dump_file && (dump_flags & TDF_STATS))
1746 fprintf (dump_file, "Processing SCC required %d iterations\n",
1749 /* Finally, visit the SCC once using the valid table. */
1750 current_info = valid_info;
1751 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1756 /* Depth first search on NAME to discover and process SCC's in the SSA
1758 Execution of this algorithm relies on the fact that the SCC's are
1759 popped off the stack in topological order. */
/* Tarjan-style SCC discovery: assign NAME its DFS number and
   low-link, push it on the SCC stack.  */
1769 VN_INFO (name)->dfsnum = next_dfs_num++;
1770 VN_INFO (name)->visited = true;
1771 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
1773 VEC_safe_push (tree, heap, sccstack, name);
1774 VN_INFO (name)->on_sccstack = true;
1775 defstmt = SSA_NAME_DEF_STMT (name);
1777 /* Recursively DFS on our operands, looking for SCC's. */
1778 if (!IS_EMPTY_STMT (defstmt))
1780 FOR_EACH_PHI_OR_STMT_USE (usep, SSA_NAME_DEF_STMT (name), iter,
1783 tree use = USE_FROM_PTR (usep);
1785 /* Since we handle phi nodes, we will sometimes get
1786 invariants in the use expression. */
1787 if (TREE_CODE (use) != SSA_NAME)
/* Unvisited use: recurse, then pull its low-link into ours.  */
1790 if (! (VN_INFO (use)->visited))
1793 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
1794 VN_INFO (use)->low);
/* Back edge to a node still on the stack: update low-link with the
   use's DFS number.  */
1796 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
1797 && VN_INFO (use)->on_sccstack)
1799 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
1800 VN_INFO (name)->low);
1805 /* See if we found an SCC. */
/* NAME is an SCC root when its low-link equals its own DFS number.  */
1806 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
1808 VEC (tree, heap) *scc = NULL;
1811 /* Found an SCC, pop the components off the SCC stack and
1815 x = VEC_pop (tree, sccstack);
1817 VN_INFO (x)->on_sccstack = false;
1818 VEC_safe_push (tree, heap, scc, x);
1819 } while (x != name);
/* Multi-member SCCs are sorted (RPO order) before processing;
   see sort_scc.  */
1821 if (VEC_length (tree, scc) > 1)
1824 if (dump_file && (dump_flags & TDF_DETAILS))
1825 print_scc (dump_file, scc);
1829 VEC_free (tree, heap, scc);
/* Release the phi-argument vector (tail of a hashtable free
   callback; the function header is not visible in this extract).  */
1837 VEC_free (tree, heap, phi->phiargs);
1841 /* Free a reference operation structure VP. */
/* htab_delete callback: releases the operand vector owned by a
   vn_reference_s entry.  */
1844 free_reference (void *vp)
1846 vn_reference_t vr = vp;
1847 VEC_free (vn_reference_op_s, heap, vr->operands);
1850 /* Allocate a value number table. */
/* Creates the four hashtables (phis own their entries via free_phi;
   unary/binary entries live in the alloc pools below) and the
   matching allocation pools.  Paired with free_vn_table.  */
1853 allocate_vn_table (vn_tables_t table)
1855 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
1856 table->unary = htab_create (23, vn_unary_op_hash, vn_unary_op_eq, NULL);
1857 table->binary = htab_create (23, vn_binary_op_hash, vn_binary_op_eq, NULL);
1858 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
1861 table->unary_op_pool = create_alloc_pool ("VN unary operations",
1862 sizeof (struct vn_unary_op_s),
1864 table->binary_op_pool = create_alloc_pool ("VN binary operations",
1865 sizeof (struct vn_binary_op_s),
1867 table->phis_pool = create_alloc_pool ("VN phis",
1868 sizeof (struct vn_phi_s),
1870 table->references_pool = create_alloc_pool ("VN references",
1871 sizeof (struct vn_reference_s),
1875 /* Free a value number table. */
/* Inverse of allocate_vn_table: delete the hashtables first (phi
   entries are freed by the table's free_phi callback), then release
   the pools backing the remaining entries.  */
1878 free_vn_table (vn_tables_t table)
1880 htab_delete (table->phis);
1881 htab_delete (table->unary);
1882 htab_delete (table->binary);
1883 htab_delete (table->references);
1884 free_alloc_pool (table->unary_op_pool);
1885 free_alloc_pool (table->binary_op_pool);
1886 free_alloc_pool (table->phis_pool);
1887 free_alloc_pool (table->references_pool);
/* Initialization for SCC value numbering (function header not
   visible in this extract): builds RPO numbers, the per-SSA-name
   aux table, statement uids, and the valid/optimistic tables.  */
1895 int *rpo_numbers_temp;
1899 calculate_dominance_info (CDI_DOMINATORS);
1903 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
1904 /* VEC_alloc doesn't actually grow it to the right size, it just
1905 preallocates the space to do so. */
1906 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
1907 shared_lookup_phiargs = NULL;
1908 shared_lookup_vops = NULL;
1909 shared_lookup_references = NULL;
1910 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
1911 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
1912 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
1914 /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
1915 the i'th block in RPO order is bb. We want to map bb's to RPO
1916 numbers, so we need to rearrange this array. */
1917 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
1918 rpo_numbers[rpo_numbers_temp[j]] = j;
1920 free (rpo_numbers_temp);
/* VN_TOP is a distinguished tree used as the "unknown" lattice
   value; a raw temporary decl guarantees it cannot collide with any
   real value.  */
1922 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
1924 /* Create the VN_INFO structures, and initialize value numbers to
1926 for (i = 0; i < num_ssa_names; i++)
1928 tree name = ssa_name (i);
1931 VN_INFO_GET (name)->valnum = VN_TOP;
1932 VN_INFO (name)->expr = name;
/* Assign sequential uids to statements; compare_ops uses them to
   break ties within a block.  */
1938 block_stmt_iterator bsi;
1939 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1941 tree stmt = bsi_stmt (bsi);
1942 stmt_ann (stmt)->uid = id++;
1946 /* Create the valid and optimistic value numbering tables. */
1947 valid_info = XCNEW (struct vn_tables_s);
1948 allocate_vn_table (valid_info);
1949 optimistic_info = XCNEW (struct vn_tables_s);
1950 allocate_vn_table (optimistic_info);
/* Allocate a fresh value-number table for the PRE pass and make it
   the table current lookups and inserts go to.  */
1955 switch_to_PRE_table (void)
1957 pre_info = XCNEW (struct vn_tables_s);
1958 allocate_vn_table (pre_info);
1959 current_info = pre_info;
/* Teardown for SCC value numbering (function header not visible in
   this extract): releases shared lookup scratch vectors, per-name
   aux info, and all value-number tables.  */
1967 VEC_free (tree, heap, shared_lookup_phiargs);
1968 VEC_free (tree, gc, shared_lookup_vops);
1969 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
1970 XDELETEVEC (rpo_numbers);
1971 for (i = 0; i < num_ssa_names; i++)
1973 tree name = ssa_name (i);
1976 XDELETE (VN_INFO (name));
/* Clear stale VALUE_HANDLEs left on SSA names so later passes do
   not see dangling values.  */
1977 if (SSA_NAME_VALUE (name) &&
1978 TREE_CODE (SSA_NAME_VALUE (name)) == VALUE_HANDLE)
1979 SSA_NAME_VALUE (name) = NULL;
1983 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
1984 VEC_free (tree, heap, sccstack);
1985 free_vn_table (valid_info);
1986 XDELETE (valid_info);
1987 free_vn_table (optimistic_info);
1988 XDELETE (optimistic_info);
/* pre_info only exists if switch_to_PRE_table was called.  */
1991 free_vn_table (pre_info);
2003 current_info = valid_info;
2005 for (param = DECL_ARGUMENTS (current_function_decl);
2007 param = TREE_CHAIN (param))
2009 if (gimple_default_def (cfun, param) != NULL)
2011 tree def = gimple_default_def (cfun, param);
2012 SSA_VAL (def) = def;
2016 for (i = num_ssa_names - 1; i > 0; i--)
2018 tree name = ssa_name (i);
2020 && VN_INFO (name)->visited == false
2021 && !has_zero_uses (name))
2025 if (dump_file && (dump_flags & TDF_DETAILS))
2027 fprintf (dump_file, "Value numbers:\n");
2028 for (i = 0; i < num_ssa_names; i++)
2030 tree name = ssa_name (i);
2031 if (name && VN_INFO (name)->visited
2032 && (SSA_VAL (name) != name
2033 || is_gimple_min_invariant (VN_INFO (name)->expr)))
2035 print_generic_expr (dump_file, name, 0);
2036 fprintf (dump_file, " = ");
2037 if (is_gimple_min_invariant (VN_INFO (name)->expr))
2038 print_generic_expr (dump_file, VN_INFO (name)->expr, 0);
2040 print_generic_expr (dump_file, SSA_VAL (name), 0);
2041 fprintf (dump_file, "\n");