1 /* SCC value numbering for trees
2 Copyright (C) 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dan@dberlin.org>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "basic-block.h"
29 #include "diagnostic.h"
30 #include "tree-inline.h"
31 #include "tree-flow.h"
32 #include "tree-gimple.h"
33 #include "tree-dump.h"
37 #include "tree-iterator.h"
39 #include "alloc-pool.h"
40 #include "tree-pass.h"
43 #include "langhooks.h"
45 #include "tree-ssa-propagate.h"
46 #include "tree-ssa-sccvn.h"
48 /* This algorithm is based on the SCC algorithm presented by Keith
49 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
50 (http://citeseer.ist.psu.edu/41805.html). In
51 straight line code, it is equivalent to a regular hash based value
52 numbering that is performed in reverse postorder.
54 For code with cycles, there are two alternatives, both of which
55 require keeping the hashtables separate from the actual list of
56 value numbers for SSA names.
58 1. Iterate value numbering in an RPO walk of the blocks, removing
59 all the entries from the hashtable after each iteration (but
60 keeping the SSA name->value number mapping between iterations).
61 Iterate until it does not change.
63 2. Perform value numbering as part of an SCC walk on the SSA graph,
64 iterating only the cycles in the SSA graph until they do not change
65 (using a separate, optimistic hashtable for value numbering the SCC
68 The second is not just faster in practice (because most SSA graph
69 cycles do not involve all the variables in the graph), it also has
72 One of these nice properties is that when we pop an SCC off the
73 stack, we are guaranteed to have processed all the operands coming from
74 *outside of that SCC*, so we do not need to do anything special to
75 ensure they have value numbers.
77 Another nice property is that the SCC walk is done as part of a DFS
78 of the SSA graph, which makes it easy to perform combining and
79 simplifying operations at the same time.
81 The code below is deliberately written in a way that makes it easy
82 to separate the SCC walk from the other work it does.
84 In order to propagate constants through the code, we track which
85 expressions contain constants, and use those while folding. In
86 theory, we could also track expressions whose value numbers are
87 replaced, in case we end up folding based on expression
90 In order to value number memory, we assign value numbers to vuses.
91 This enables us to note that, for example, stores to the same
92 address of the same value from the same starting memory states are
96 1. We can iterate only the changing portions of the SCC's, but
97 I have not seen an SCC big enough for this to be a win.
98 2. If you differentiate between phi nodes for loops and phi nodes
99 for if-then-else, you can properly consider phi nodes in different
100 blocks for equivalence.
101 3. We could value number vuses in more cases, particularly, whole
105 /* The set of hashtables and alloc_pool's for their items. */
107 typedef struct vn_tables_s
113 alloc_pool unary_op_pool;
114 alloc_pool binary_op_pool;
115 alloc_pool phis_pool;
116 alloc_pool references_pool;
119 /* Binary operations in the hashtable consist of two operands, an
120 opcode, and a type. Result is the value number of the operation,
121 and hashcode is stored to avoid having to calculate it
124 typedef struct vn_binary_op_s
126 enum tree_code opcode;
133 typedef const struct vn_binary_op_s *const_vn_binary_op_t;
135 /* Unary operations in the hashtable consist of a single operand, an
136 opcode, and a type. Result is the value number of the operation,
137 and hashcode is stored to avoid having to calculate it repeatedly. */
139 typedef struct vn_unary_op_s
141 enum tree_code opcode;
147 typedef const struct vn_unary_op_s *const_vn_unary_op_t;
149 /* Phi nodes in the hashtable consist of their non-VN_TOP phi
150 arguments, and the basic block the phi is in. Result is the value
151 number of the operation, and hashcode is stored to avoid having to
152 calculate it repeatedly. Phi nodes not in the same block are never
153 considered equivalent. */
155 typedef struct vn_phi_s
157 VEC (tree, heap) *phiargs;
162 typedef const struct vn_phi_s *const_vn_phi_t;
164 /* Reference operands only exist in reference operations structures.
165 They consist of an opcode, type, and some number of operands. For
166 a given opcode, some, all, or none of the operands may be used.
167 The operands are there to store the information that makes up the
168 portion of the addressing calculation that opcode performs. */
170 typedef struct vn_reference_op_struct
172 enum tree_code opcode;
178 typedef vn_reference_op_s *vn_reference_op_t;
179 typedef const vn_reference_op_s *const_vn_reference_op_t;
181 DEF_VEC_O(vn_reference_op_s);
182 DEF_VEC_ALLOC_O(vn_reference_op_s, heap);
184 /* A reference operation in the hashtable is representation as a
185 collection of vuses, representing the memory state at the time of
186 the operation, and a collection of operands that make up the
187 addressing calculation. If two vn_reference_t's have the same set
188 of operands, they access the same memory location. We also store
189 the resulting value number, and the hashcode. The vuses are
190 always stored in order sorted by ssa name version. */
192 typedef struct vn_reference_s
194 VEC (tree, gc) *vuses;
195 VEC (vn_reference_op_s, heap) *operands;
199 typedef const struct vn_reference_s *const_vn_reference_t;
201 /* Valid hashtables storing information we have proven to be
204 static vn_tables_t valid_info;
206 /* Optimistic hashtables storing information we are making assumptions about
207 during iterations. */
209 static vn_tables_t optimistic_info;
211 /* PRE hashtables storing information about mapping from expressions to
214 static vn_tables_t pre_info;
216 /* Pointer to the set of hashtables that is currently being used.
217 Should always point to either the optimistic_info, or the
220 static vn_tables_t current_info;
223 /* Reverse post order index for each basic block. */
225 static int *rpo_numbers;
227 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
229 /* This represents the top of the VN lattice, which is the universal
234 /* Next DFS number and the stack for strongly connected component
237 static unsigned int next_dfs_num;
238 static VEC (tree, heap) *sccstack;
240 DEF_VEC_P(vn_ssa_aux_t);
241 DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);
243 /* Table of vn_ssa_aux_t's, one per ssa_name. */
245 static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
247 /* Return the value numbering information for a given SSA name. */
252 return VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
253 SSA_NAME_VERSION (name));
256 /* Set the value numbering info for a given SSA name to a given
260 VN_INFO_SET (tree name, vn_ssa_aux_t value)
262 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
263 SSA_NAME_VERSION (name), value);
266 /* Get the value numbering info for a given SSA name, creating it if
267 it does not exist. */
270 VN_INFO_GET (tree name)
272 vn_ssa_aux_t newinfo = XCNEW (struct vn_ssa_aux);
273 if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
274 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
275 SSA_NAME_VERSION (name) + 1);
276 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
277 SSA_NAME_VERSION (name), newinfo);
282 /* Compare two reference operands P1 and P2 for equality. return true if
283 they are equal, and false otherwise. */
286 vn_reference_op_eq (const void *p1, const void *p2)
288 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
289 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
290 return vro1->opcode == vro2->opcode
291 && vro1->type == vro2->type
292 && expressions_equal_p (vro1->op0, vro2->op0)
293 && expressions_equal_p (vro1->op1, vro2->op1)
294 && expressions_equal_p (vro1->op2, vro2->op2);
297 /* Compute the hash for a reference operand VRO1 */
300 vn_reference_op_compute_hash (const vn_reference_op_t vro1)
302 return iterative_hash_expr (vro1->op0, vro1->opcode)
303 + iterative_hash_expr (vro1->op1, vro1->opcode)
304 + iterative_hash_expr (vro1->op2, vro1->opcode);
307 /* Return the hashcode for a given reference operation P1. */
310 vn_reference_hash (const void *p1)
312 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
313 return vr1->hashcode;
316 /* Compute a hash for the reference operation VR1 and return it. */
318 static inline hashval_t
319 vn_reference_compute_hash (const vn_reference_t vr1)
321 hashval_t result = 0;
324 vn_reference_op_t vro;
326 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
327 result += iterative_hash_expr (v, 0);
328 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
329 result += vn_reference_op_compute_hash (vro);
334 /* Return true if reference operations P1 and P2 are equivalent. This
335 means they have the same set of operands and vuses. */
338 vn_reference_eq (const void *p1, const void *p2)
342 vn_reference_op_t vro;
344 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
345 const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
347 if (vr1->vuses == vr2->vuses
348 && vr1->operands == vr2->operands)
351 /* Impossible for them to be equivalent if they have different
353 if (VEC_length (tree, vr1->vuses) != VEC_length (tree, vr2->vuses))
356 /* We require that address operands be canonicalized in a way that
357 two memory references will have the same operands if they are
359 if (VEC_length (vn_reference_op_s, vr1->operands)
360 != VEC_length (vn_reference_op_s, vr2->operands))
363 /* The memory state is more often different than the address of the
364 store/load, so check it first. */
365 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
367 if (VEC_index (tree, vr2->vuses, i) != v)
371 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
373 if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
380 /* Place the vuses from STMT into *result */
383 vuses_to_vec (tree stmt, VEC (tree, gc) **result)
391 FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VIRTUAL_USES)
392 VEC_safe_push (tree, gc, *result, vuse);
394 if (VEC_length (tree, *result) > 1)
395 sort_vuses (*result);
399 /* Copy the VUSE names in STMT into a vector, and return
403 copy_vuses_from_stmt (tree stmt)
405 VEC (tree, gc) *vuses = NULL;
407 vuses_to_vec (stmt, &vuses);
412 /* Place the vdefs from STMT into *result */
415 vdefs_to_vec (tree stmt, VEC (tree, gc) **result)
423 FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, iter, SSA_OP_VIRTUAL_DEFS)
424 VEC_safe_push (tree, gc, *result, vdef);
426 if (VEC_length (tree, *result) > 1)
427 sort_vuses (*result);
430 /* Copy the names of vdef results in STMT into a vector, and return
433 static VEC (tree, gc) *
434 copy_vdefs_from_stmt (tree stmt)
436 VEC (tree, gc) *vdefs = NULL;
438 vdefs_to_vec (stmt, &vdefs);
443 /* Place for shared_v{uses/defs}_from_stmt to shove vuses/vdefs. */
444 static VEC (tree, gc) *shared_lookup_vops;
446 /* Copy the virtual uses from STMT into SHARED_LOOKUP_VOPS.
447 This function will overwrite the current SHARED_LOOKUP_VOPS
451 shared_vuses_from_stmt (tree stmt)
453 VEC_truncate (tree, shared_lookup_vops, 0);
454 vuses_to_vec (stmt, &shared_lookup_vops);
456 return shared_lookup_vops;
459 /* Copy the operations present in load/store/call REF into RESULT, a vector of
460 vn_reference_op_s's. */
463 copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
465 /* Calls are different from all other reference operations. */
466 if (TREE_CODE (ref) == CALL_EXPR)
468 vn_reference_op_s temp;
470 call_expr_arg_iterator iter;
473 /* Copy the call_expr opcode, type, function being called, and
475 memset (&temp, 0, sizeof (temp));
476 temp.type = TREE_TYPE (ref);
477 temp.opcode = CALL_EXPR;
478 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
480 callfn = get_callee_fndecl (ref);
482 callfn = CALL_EXPR_FN (ref);
483 temp.type = TREE_TYPE (callfn);
484 temp.opcode = TREE_CODE (callfn);
486 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
488 FOR_EACH_CALL_EXPR_ARG (callarg, iter, ref)
490 memset (&temp, 0, sizeof (temp));
491 temp.type = TREE_TYPE (callarg);
492 temp.opcode = TREE_CODE (callarg);
494 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
499 /* For non-calls, store the information that makes up the address. */
503 vn_reference_op_s temp;
505 memset (&temp, 0, sizeof (temp));
506 temp.type = TREE_TYPE (ref);
507 temp.opcode = TREE_CODE (ref);
511 case ALIGN_INDIRECT_REF:
512 case MISALIGNED_INDIRECT_REF:
514 /* The only operand is the address, which gets its own
515 vn_reference_op_s structure. */
518 /* Record bits and position. */
519 temp.op0 = TREE_OPERAND (ref, 1);
520 temp.op1 = TREE_OPERAND (ref, 2);
523 /* Record field as operand. */
524 temp.op0 = TREE_OPERAND (ref, 1);
526 case ARRAY_RANGE_REF:
528 /* Record index as operand. */
529 temp.op0 = TREE_OPERAND (ref, 1);
530 temp.op1 = TREE_OPERAND (ref, 3);
545 /* These are only interesting for their operands, their
546 existence, and their type. They will never be the last
547 ref in the chain of references (IE they require an
548 operand), so we don't have to put anything
549 for op* as it will be handled by the iteration */
552 case VIEW_CONVERT_EXPR:
559 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
561 if (REFERENCE_CLASS_P (ref) || TREE_CODE (ref) == ADDR_EXPR)
562 ref = TREE_OPERAND (ref, 0);
568 /* Create a vector of vn_reference_op_s structures from REF, a
569 REFERENCE_CLASS_P tree. The vector is not shared. */
571 static VEC(vn_reference_op_s, heap) *
572 create_reference_ops_from_ref (tree ref)
574 VEC (vn_reference_op_s, heap) *result = NULL;
576 copy_reference_ops_from_ref (ref, &result);
580 static VEC(vn_reference_op_s, heap) *shared_lookup_references;
582 /* Create a vector of vn_reference_op_s structures from REF, a
583 REFERENCE_CLASS_P tree. The vector is shared among all callers of
586 static VEC(vn_reference_op_s, heap) *
587 shared_reference_ops_from_ref (tree ref)
591 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
592 copy_reference_ops_from_ref (ref, &shared_lookup_references);
593 return shared_lookup_references;
597 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
598 structures into their value numbers. This is done in-place, and
599 the vector passed in is returned. */
601 static VEC (vn_reference_op_s, heap) *
602 valueize_refs (VEC (vn_reference_op_s, heap) *orig)
604 vn_reference_op_t vro;
607 for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
609 if (vro->opcode == SSA_NAME
610 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
611 vro->op0 = SSA_VAL (vro->op0);
617 /* Transform any SSA_NAME's in ORIG, a vector of vuse trees, into
618 their value numbers. This is done in-place, and the vector passed
621 static VEC (tree, gc) *
622 valueize_vuses (VEC (tree, gc) *orig)
624 bool made_replacement = false;
628 for (i = 0; VEC_iterate (tree, orig, i, vuse); i++)
630 if (vuse != SSA_VAL (vuse))
632 made_replacement = true;
633 VEC_replace (tree, orig, i, SSA_VAL (vuse));
637 if (made_replacement && VEC_length (tree, orig) > 1)
643 /* Lookup OP in the current hash table, and return the resulting
644 value number if it exists in the hash table. Return NULL_TREE if
645 it does not exist in the hash table. */
648 vn_reference_lookup (tree op, VEC (tree, gc) *vuses)
651 struct vn_reference_s vr1;
653 vr1.vuses = valueize_vuses (vuses);
654 vr1.operands = valueize_refs (shared_reference_ops_from_ref (op));
655 vr1.hashcode = vn_reference_compute_hash (&vr1);
656 slot = htab_find_slot_with_hash (current_info->references, &vr1, vr1.hashcode,
658 if (!slot && current_info == optimistic_info)
659 slot = htab_find_slot_with_hash (valid_info->references, &vr1, vr1.hashcode,
664 return ((vn_reference_t)*slot)->result;
667 /* Insert OP into the current hash table with a value number of
671 vn_reference_insert (tree op, tree result, VEC (tree, gc) *vuses)
676 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
678 vr1->vuses = valueize_vuses (vuses);
679 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
680 vr1->hashcode = vn_reference_compute_hash (vr1);
681 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
683 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
686 /* Because we lookup stores using vuses, and value number failures
687 using the vdefs (see visit_reference_op_store for how and why),
688 it's possible that on failure we may try to insert an already
689 inserted store. This is not wrong, there is no ssa name for a
690 store that we could use as a differentiator anyway. Thus, unlike
691 the other lookup functions, you cannot gcc_assert (!*slot)
699 /* Return the stored hashcode for a unary operation. */
702 vn_unary_op_hash (const void *p1)
704 const_vn_unary_op_t const vuo1 = (const_vn_unary_op_t) p1;
705 return vuo1->hashcode;
708 /* Hash a unary operation P1 and return the result. */
710 static inline hashval_t
711 vn_unary_op_compute_hash (const vn_unary_op_t vuo1)
713 return iterative_hash_expr (vuo1->op0, vuo1->opcode);
716 /* Return true if P1 and P2, two unary operations, are equivalent. */
719 vn_unary_op_eq (const void *p1, const void *p2)
721 const_vn_unary_op_t const vuo1 = (const_vn_unary_op_t) p1;
722 const_vn_unary_op_t const vuo2 = (const_vn_unary_op_t) p2;
723 return vuo1->opcode == vuo2->opcode
724 && vuo1->type == vuo2->type
725 && expressions_equal_p (vuo1->op0, vuo2->op0);
728 /* Lookup OP in the current hash table, and return the resulting
729 value number if it exists in the hash table. Return NULL_TREE if
730 it does not exist in the hash table. */
733 vn_unary_op_lookup (tree op)
736 struct vn_unary_op_s vuo1;
738 vuo1.opcode = TREE_CODE (op);
739 vuo1.type = TREE_TYPE (op);
740 vuo1.op0 = TREE_OPERAND (op, 0);
742 if (TREE_CODE (vuo1.op0) == SSA_NAME)
743 vuo1.op0 = SSA_VAL (vuo1.op0);
745 vuo1.hashcode = vn_unary_op_compute_hash (&vuo1);
746 slot = htab_find_slot_with_hash (current_info->unary, &vuo1, vuo1.hashcode,
748 if (!slot && current_info == optimistic_info)
749 slot = htab_find_slot_with_hash (valid_info->unary, &vuo1, vuo1.hashcode,
753 return ((vn_unary_op_t)*slot)->result;
756 /* Insert OP into the current hash table with a value number of
760 vn_unary_op_insert (tree op, tree result)
763 vn_unary_op_t vuo1 = (vn_unary_op_t) pool_alloc (current_info->unary_op_pool);
765 vuo1->opcode = TREE_CODE (op);
766 vuo1->type = TREE_TYPE (op);
767 vuo1->op0 = TREE_OPERAND (op, 0);
768 vuo1->result = result;
770 if (TREE_CODE (vuo1->op0) == SSA_NAME)
771 vuo1->op0 = SSA_VAL (vuo1->op0);
773 vuo1->hashcode = vn_unary_op_compute_hash (vuo1);
774 slot = htab_find_slot_with_hash (current_info->unary, vuo1, vuo1->hashcode,
780 /* Compute and return the hash value for binary operation VBO1. */
782 static inline hashval_t
783 vn_binary_op_compute_hash (const vn_binary_op_t vbo1)
785 return iterative_hash_expr (vbo1->op0, vbo1->opcode)
786 + iterative_hash_expr (vbo1->op1, vbo1->opcode);
789 /* Return the computed hashcode for binary operation P1. */
792 vn_binary_op_hash (const void *p1)
794 const_vn_binary_op_t const vbo1 = (const_vn_binary_op_t) p1;
795 return vbo1->hashcode;
798 /* Compare binary operations P1 and P2 and return true if they are
802 vn_binary_op_eq (const void *p1, const void *p2)
804 const_vn_binary_op_t const vbo1 = (const_vn_binary_op_t) p1;
805 const_vn_binary_op_t const vbo2 = (const_vn_binary_op_t) p2;
806 return vbo1->opcode == vbo2->opcode
807 && vbo1->type == vbo2->type
808 && expressions_equal_p (vbo1->op0, vbo2->op0)
809 && expressions_equal_p (vbo1->op1, vbo2->op1);
812 /* Lookup OP in the current hash table, and return the resulting
813 value number if it exists in the hash table. Return NULL_TREE if
814 it does not exist in the hash table. */
817 vn_binary_op_lookup (tree op)
820 struct vn_binary_op_s vbo1;
822 vbo1.opcode = TREE_CODE (op);
823 vbo1.type = TREE_TYPE (op);
824 vbo1.op0 = TREE_OPERAND (op, 0);
825 vbo1.op1 = TREE_OPERAND (op, 1);
827 if (TREE_CODE (vbo1.op0) == SSA_NAME)
828 vbo1.op0 = SSA_VAL (vbo1.op0);
829 if (TREE_CODE (vbo1.op1) == SSA_NAME)
830 vbo1.op1 = SSA_VAL (vbo1.op1);
832 if (tree_swap_operands_p (vbo1.op0, vbo1.op1, false)
833 && commutative_tree_code (vbo1.opcode))
835 tree temp = vbo1.op0;
840 vbo1.hashcode = vn_binary_op_compute_hash (&vbo1);
841 slot = htab_find_slot_with_hash (current_info->binary, &vbo1, vbo1.hashcode,
843 if (!slot && current_info == optimistic_info)
844 slot = htab_find_slot_with_hash (valid_info->binary, &vbo1, vbo1.hashcode,
848 return ((vn_binary_op_t)*slot)->result;
851 /* Insert OP into the current hash table with a value number of
855 vn_binary_op_insert (tree op, tree result)
859 vbo1 = (vn_binary_op_t) pool_alloc (current_info->binary_op_pool);
861 vbo1->opcode = TREE_CODE (op);
862 vbo1->type = TREE_TYPE (op);
863 vbo1->op0 = TREE_OPERAND (op, 0);
864 vbo1->op1 = TREE_OPERAND (op, 1);
865 vbo1->result = result;
867 if (TREE_CODE (vbo1->op0) == SSA_NAME)
868 vbo1->op0 = SSA_VAL (vbo1->op0);
869 if (TREE_CODE (vbo1->op1) == SSA_NAME)
870 vbo1->op1 = SSA_VAL (vbo1->op1);
872 if (tree_swap_operands_p (vbo1->op0, vbo1->op1, false)
873 && commutative_tree_code (vbo1->opcode))
875 tree temp = vbo1->op0;
876 vbo1->op0 = vbo1->op1;
879 vbo1->hashcode = vn_binary_op_compute_hash (vbo1);
880 slot = htab_find_slot_with_hash (current_info->binary, vbo1, vbo1->hashcode,
887 /* Compute a hashcode for PHI operation VP1 and return it. */
889 static inline hashval_t
890 vn_phi_compute_hash (vn_phi_t vp1)
892 hashval_t result = 0;
896 result = vp1->block->index;
898 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
900 if (phi1op == VN_TOP)
902 result += iterative_hash_expr (phi1op, result);
908 /* Return the computed hashcode for phi operation P1. */
911 vn_phi_hash (const void *p1)
913 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
914 return vp1->hashcode;
917 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
920 vn_phi_eq (const void *p1, const void *p2)
922 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
923 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
925 if (vp1->block == vp2->block)
930 /* Any phi in the same block will have it's arguments in the
931 same edge order, because of how we store phi nodes. */
932 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
934 tree phi2op = VEC_index (tree, vp2->phiargs, i);
935 if (phi1op == VN_TOP || phi2op == VN_TOP)
937 if (!expressions_equal_p (phi1op, phi2op))
945 static VEC(tree, heap) *shared_lookup_phiargs;
947 /* Lookup PHI in the current hash table, and return the resulting
948 value number if it exists in the hash table. Return NULL_TREE if
949 it does not exist in the hash table. */
952 vn_phi_lookup (tree phi)
958 VEC_truncate (tree, shared_lookup_phiargs, 0);
960 /* Canonicalize the SSA_NAME's to their value number. */
961 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
963 tree def = PHI_ARG_DEF (phi, i);
964 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
965 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
967 vp1.phiargs = shared_lookup_phiargs;
968 vp1.block = bb_for_stmt (phi);
969 vp1.hashcode = vn_phi_compute_hash (&vp1);
970 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
972 if (!slot && current_info == optimistic_info)
973 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
977 return ((vn_phi_t)*slot)->result;
980 /* Insert PHI into the current hash table with a value number of
984 vn_phi_insert (tree phi, tree result)
987 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
989 VEC (tree, heap) *args = NULL;
991 /* Canonicalize the SSA_NAME's to their value number. */
992 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
994 tree def = PHI_ARG_DEF (phi, i);
995 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
996 VEC_safe_push (tree, heap, args, def);
999 vp1->block = bb_for_stmt (phi);
1000 vp1->result = result;
1001 vp1->hashcode = vn_phi_compute_hash (vp1);
1003 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
1006 /* Because we iterate over phi operations more than once, it's
1007 possible the slot might already exist here, hence no assert.*/
1012 /* Print set of components in strongly connected component SCC to OUT. */
1015 print_scc (FILE *out, VEC (tree, heap) *scc)
1020 fprintf (out, "SCC consists of: ");
1021 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1023 print_generic_expr (out, var, 0);
1026 fprintf (out, "\n");
1029 /* Set the value number of FROM to TO, return true if it has changed
1033 set_ssa_val_to (tree from, tree to)
1038 && TREE_CODE (to) == SSA_NAME
1039 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
1042 /* The only thing we allow as value numbers are VN_TOP, ssa_names
1043 and invariants. So assert that here. */
1044 gcc_assert (to != NULL_TREE
1046 || TREE_CODE (to) == SSA_NAME
1047 || is_gimple_min_invariant (to)));
1049 if (dump_file && (dump_flags & TDF_DETAILS))
1051 fprintf (dump_file, "Setting value number of ");
1052 print_generic_expr (dump_file, from, 0);
1053 fprintf (dump_file, " to ");
1054 print_generic_expr (dump_file, to, 0);
1055 fprintf (dump_file, "\n");
1058 currval = SSA_VAL (from);
1060 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
1062 SSA_VAL (from) = to;
1068 /* Set all definitions in STMT to value number to themselves.
1069 Return true if a value number changed. */
1072 defs_to_varying (tree stmt)
1074 bool changed = false;
1078 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
1080 tree def = DEF_FROM_PTR (defp);
1082 VN_INFO (def)->use_processed = true;
1083 changed |= set_ssa_val_to (def, def);
1088 /* Visit a copy between LHS and RHS, return true if the value number
1092 visit_copy (tree lhs, tree rhs)
1095 /* Follow chains of copies to their destination. */
1096 while (SSA_VAL (rhs) != rhs && TREE_CODE (SSA_VAL (rhs)) == SSA_NAME)
1097 rhs = SSA_VAL (rhs);
1099 /* The copy may have a more interesting constant filled expression
1100 (we don't, since we know our RHS is just an SSA name). */
1101 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
1102 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
1104 return set_ssa_val_to (lhs, rhs);
1107 /* Visit a unary operator RHS, value number it, and return true if the
1108 value number of LHS has changed as a result. */
1111 visit_unary_op (tree lhs, tree op)
1113 bool changed = false;
1114 tree result = vn_unary_op_lookup (op);
1118 changed = set_ssa_val_to (lhs, result);
1122 changed = set_ssa_val_to (lhs, lhs);
1123 vn_unary_op_insert (op, lhs);
1129 /* Visit a binary operator RHS, value number it, and return true if the
1130 value number of LHS has changed as a result. */
1133 visit_binary_op (tree lhs, tree op)
1135 bool changed = false;
1136 tree result = vn_binary_op_lookup (op);
1140 changed = set_ssa_val_to (lhs, result);
1144 changed = set_ssa_val_to (lhs, lhs);
1145 vn_binary_op_insert (op, lhs);
1151 /* Visit a load from a reference operator RHS, part of STMT, value number it,
1152 and return true if the value number of the LHS has changed as a result. */
1155 visit_reference_op_load (tree lhs, tree op, tree stmt)
1157 bool changed = false;
1158 tree result = vn_reference_lookup (op, shared_vuses_from_stmt (stmt));
1162 changed = set_ssa_val_to (lhs, result);
1166 changed = set_ssa_val_to (lhs, lhs);
1167 vn_reference_insert (op, lhs, copy_vuses_from_stmt (stmt));
1174 /* Visit a store to a reference operator LHS, part of STMT, value number it,
1175 and return true if the value number of the LHS has changed as a result. */
1178 visit_reference_op_store (tree lhs, tree op, tree stmt)
1180 bool changed = false;
1182 bool resultsame = false;
1184 /* First we want to lookup using the *vuses* from the store and see
1185 if there the last store to this location with the same address
1188 The vuses represent the memory state before the store. If the
1189 memory state, address, and value of the store is the same as the
1190 last store to this location, then this store will produce the
1191 same memory state as that store.
1193 In this case the vdef versions for this store are value numbered to those
1194 vuse versions, since they represent the same memory state after
1197 Otherwise, the vdefs for the store are used when inserting into
1198 the table, since the store generates a new memory state. */
1200 result = vn_reference_lookup (lhs, shared_vuses_from_stmt (stmt));
1204 if (TREE_CODE (result) == SSA_NAME)
1205 result = SSA_VAL (result);
1206 resultsame = expressions_equal_p (result, op);
1209 if (!result || !resultsame)
1211 VEC(tree, gc) *vdefs = copy_vdefs_from_stmt (stmt);
1215 if (dump_file && (dump_flags & TDF_DETAILS))
1217 fprintf (dump_file, "No store match\n");
1218 fprintf (dump_file, "Value numbering store ");
1219 print_generic_expr (dump_file, lhs, 0);
1220 fprintf (dump_file, " to ");
1221 print_generic_expr (dump_file, op, 0);
1222 fprintf (dump_file, "\n");
1224 /* Have to set value numbers before insert, since insert is
1225 going to valueize the references in-place. */
1226 for (i = 0; VEC_iterate (tree, vdefs, i, vdef); i++)
1228 VN_INFO (vdef)->use_processed = true;
1229 changed |= set_ssa_val_to (vdef, vdef);
1232 vn_reference_insert (lhs, op, vdefs);
1236 /* We had a match, so value number the vdefs to have the value
1237 number of the vuses they came from. */
1238 ssa_op_iter op_iter;
1242 if (dump_file && (dump_flags & TDF_DETAILS))
1243 fprintf (dump_file, "Store matched earlier value,"
1244 "value numbering store vdefs to matching vuses.\n");
1246 FOR_EACH_SSA_VDEF_OPERAND (var, vv, stmt, op_iter)
1248 tree def = DEF_FROM_PTR (var);
1251 /* Uh, if the vuse is a multiuse, we can't really do much
1252 here, sadly, since we don't know which value number of
1253 which vuse to use. */
1254 if (VUSE_VECT_NUM_ELEM (*vv) != 1)
1257 use = VUSE_ELEMENT_VAR (*vv, 0);
1259 VN_INFO (def)->use_processed = true;
1260 changed |= set_ssa_val_to (def, SSA_VAL (use));
1267 /* Visit and value number PHI, return true if the value number
1271 visit_phi (tree phi)
1273 bool changed = false;
1275 tree sameval = VN_TOP;
1276 bool allsame = true;
/* Abnormal PHIs cannot be value numbered to anything else, so give
   the result its own value number immediately.  */
1279 /* TODO: We could check for this in init_sccvn, and replace this
1280 with a gcc_assert. */
1281 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
1282 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi))
1284 /* See if all non-TOP arguments have the same value. TOP is
1285 equivalent to everything, so we can ignore it. */
1286 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
1288 tree def = PHI_ARG_DEF (phi, i);
/* Compare value numbers, not the SSA names themselves.  */
1290 if (TREE_CODE (def) == SSA_NAME)
1291 def = SSA_VAL (def);
1294 if (sameval == VN_TOP)
1300 if (!expressions_equal_p (def, sameval))
/* All arguments agreed (modulo VN_TOP): the PHI result takes that
   common value.  NOTE(review): extraction is missing the lines that
   guard this with the allsame flag -- verify against full source.  */
1308 /* If all value numbered to the same value, the phi node has that
1312 if (is_gimple_min_invariant (sameval))
1314 VN_INFO (PHI_RESULT (phi))->has_constants = true;
1315 VN_INFO (PHI_RESULT (phi))->expr = sameval;
1319 VN_INFO (PHI_RESULT (phi))->has_constants = false;
1320 VN_INFO (PHI_RESULT (phi))->expr = sameval;
/* Propagate either as a copy of an SSA name or as a direct value.  */
1323 if (TREE_CODE (sameval) == SSA_NAME)
1324 return visit_copy (PHI_RESULT (phi), sameval);
1326 return set_ssa_val_to (PHI_RESULT (phi), sameval);
1329 /* Otherwise, see if it is equivalent to a phi node in this block. */
1330 result = vn_phi_lookup (phi);
1333 if (TREE_CODE (result) == SSA_NAME)
1334 changed = visit_copy (PHI_RESULT (phi), result);
1336 changed = set_ssa_val_to (PHI_RESULT (phi), result);
/* No equivalent PHI found: record this one in the PHI table and make
   the result its own value number.  */
1340 vn_phi_insert (phi, PHI_RESULT (phi));
1341 VN_INFO (PHI_RESULT (phi))->has_constants = false;
1342 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
1343 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
1349 /* Return true if EXPR contains constants. */
1352 expr_has_constants (tree expr)
1354 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
/* Unary: constant iff the single operand is invariant.  */
1357 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
/* Binary: constant if either operand is invariant.  */
1360 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
1361 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
1362 /* Constants inside reference ops are rarely interesting, but
1363 it can take a lot of looking to find them. */
1365 case tcc_declaration:
/* Declarations and the default case: EXPR itself may be invariant.  */
1368 return is_gimple_min_invariant (expr);
1373 /* Replace SSA_NAMES in expr with their value numbers, and return the
1375 This is performed in place. */
1378 valueize_expr (tree expr)
1380 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
/* Unary case: substitute operand 0 with its value number, but never
   with VN_TOP (that would leave an undefined placeholder in the IL).  */
1383 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
1384 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
1385 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
/* Binary case: substitute both operands, same VN_TOP guard.  */
1388 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
1389 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
1390 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
1391 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
1392 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
1393 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
1401 /* Simplify the binary expression RHS, and return the result if
1405 simplify_binary_expression (tree stmt, tree rhs)
1407 tree result = NULL_TREE;
1408 tree op0 = TREE_OPERAND (rhs, 0);
1409 tree op1 = TREE_OPERAND (rhs, 1);
1411 /* This will not catch every single case we could combine, but will
1412 catch those with constants. The goal here is to simultaneously
1413 combine constants between expressions, but avoid infinite
1414 expansion of expressions during simplification. */
/* For each SSA operand: prefer its recorded constant-bearing
   expression; otherwise use its value number (unless VN_TOP or a
   self-value, which would be no improvement).  */
1415 if (TREE_CODE (op0) == SSA_NAME)
1417 if (VN_INFO (op0)->has_constants)
1418 op0 = valueize_expr (VN_INFO (op0)->expr);
1419 else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
1420 op0 = SSA_VAL (op0);
1423 if (TREE_CODE (op1) == SSA_NAME)
1425 if (VN_INFO (op1)->has_constants)
1426 op1 = valueize_expr (VN_INFO (op1)->expr);
1427 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
1428 op1 = SSA_VAL (op1);
1431 /* Avoid folding if nothing changed. */
1432 if (op0 == TREE_OPERAND (rhs, 0)
1433 && op1 == TREE_OPERAND (rhs, 1))
/* Defer overflow warnings so folding speculative operands does not
   emit user-visible diagnostics unless the result is actually used.  */
1436 fold_defer_overflow_warnings ();
1438 result = fold_binary (TREE_CODE (rhs), TREE_TYPE (rhs), op0, op1);
1440 fold_undefer_overflow_warnings (result && valid_gimple_expression_p (result),
1443 /* Make sure result is not a complex expression consisting
1444 of operators of operators (IE (a + b) + (a + c))
1445 Otherwise, we will end up with unbounded expressions if
1446 fold does anything at all. */
1447 if (result && valid_gimple_expression_p (result))
1453 /* Simplify the unary expression RHS, and return the result if
1457 simplify_unary_expression (tree rhs)
1459 tree result = NULL_TREE;
1460 tree op0 = TREE_OPERAND (rhs, 0);
/* Only SSA operands can be valueized; anything else is left alone.  */
1462 if (TREE_CODE (op0) != SSA_NAME)
1465 if (VN_INFO (op0)->has_constants)
1466 op0 = valueize_expr (VN_INFO (op0)->expr);
1467 else if (TREE_CODE (rhs) == NOP_EXPR
1468 || TREE_CODE (rhs) == CONVERT_EXPR
1469 || TREE_CODE (rhs) == REALPART_EXPR
1470 || TREE_CODE (rhs) == IMAGPART_EXPR)
1472 /* We want to do tree-combining on conversion-like expressions.
1473 Make sure we feed only SSA_NAMEs or constants to fold though. */
1474 tree tem = valueize_expr (VN_INFO (op0)->expr);
1475 if (UNARY_CLASS_P (tem)
1476 || BINARY_CLASS_P (tem)
1477 || TREE_CODE (tem) == SSA_NAME
1478 || is_gimple_min_invariant (tem))
1482 /* Avoid folding if nothing changed, but remember the expression. */
1483 if (op0 == TREE_OPERAND (rhs, 0))
1486 result = fold_unary (TREE_CODE (rhs), TREE_TYPE (rhs), op0);
/* Strip conversions fold may have introduced before validating that
   the result is a valid GIMPLE expression.  */
1489 STRIP_USELESS_TYPE_CONVERSION (result);
1490 if (valid_gimple_expression_p (result))
1497 /* Try to simplify RHS using equivalences and constant folding. */
1500 try_to_simplify (tree stmt, tree rhs)
/* An SSA name simplifies straight to its constant value number or its
   recorded constant-bearing expression, if either exists.  */
1502 if (TREE_CODE (rhs) == SSA_NAME)
1504 if (is_gimple_min_invariant (SSA_VAL (rhs)))
1505 return SSA_VAL (rhs);
1506 else if (VN_INFO (rhs)->has_constants)
1507 return VN_INFO (rhs)->expr;
1511 switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
1513 /* For references, see if we find a result for the lookup,
1514 and use it if we do. */
1515 case tcc_declaration:
1516 /* Pull out any truly constant values. */
1517 if (TREE_READONLY (rhs)
1518 && TREE_STATIC (rhs)
1519 && DECL_INITIAL (rhs)
1520 && valid_gimple_expression_p (DECL_INITIAL (rhs)))
1521 return DECL_INITIAL (rhs);
/* Reference class: reuse a previous value-numbered load, keyed on the
   statement's virtual uses.  */
1526 tree result = vn_reference_lookup (rhs,
1527 shared_vuses_from_stmt (stmt));
1531 /* Fallthrough for some codes. */
/* REALPART/IMAGPART were already handled as references above; only the
   remaining unary codes fall through to expression simplification.  */
1532 if (!(TREE_CODE (rhs) == REALPART_EXPR
1533 || TREE_CODE (rhs) == IMAGPART_EXPR))
1535 /* We could do a little more with unary ops, if they expand
1536 into binary ops, but it's debatable whether it is worth it. */
1538 return simplify_unary_expression (rhs);
1540 case tcc_comparison:
1542 return simplify_binary_expression (stmt, rhs);
1551 /* Visit and value number USE, return true if the value number
1555 visit_use (tree use)
1557 bool changed = false;
1558 tree stmt = SSA_NAME_DEF_STMT (use);
/* Mark USE processed before visiting, so cycles in the SSA graph do
   not revisit it from within this call.  */
1561 VN_INFO (use)->use_processed = true;
1563 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
1564 if (dump_file && (dump_flags & TDF_DETAILS))
1566 fprintf (dump_file, "Value numbering ");
1567 print_generic_expr (dump_file, use, 0);
1568 fprintf (dump_file, " stmt = ");
1569 print_generic_stmt (dump_file, stmt, 0);
1572 /* RETURN_EXPR may have an embedded MODIFY_STMT. */
1573 if (TREE_CODE (stmt) == RETURN_EXPR
1574 && TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT)
1575 stmt = TREE_OPERAND (stmt, 0);
1577 ann = stmt_ann (stmt);
1579 /* Handle uninitialized uses. */
/* An empty def statement means USE is undefined; it values to itself.  */
1580 if (IS_EMPTY_STMT (stmt))
1582 changed = set_ssa_val_to (use, use);
1586 if (TREE_CODE (stmt) == PHI_NODE)
1588 changed = visit_phi (stmt);
/* Non-assignments and volatile statements cannot be value numbered;
   all their defs go to VARYING.  */
1590 else if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
1591 || (ann && ann->has_volatile_ops))
1593 changed = defs_to_varying (stmt);
1597 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
1598 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
1601 STRIP_USELESS_TYPE_CONVERSION (rhs);
1603 /* Shortcut for copies. Simplifying copies is pointless,
1604 since we copy the expression and value they represent. */
1605 if (TREE_CODE (rhs) == SSA_NAME && TREE_CODE (lhs) == SSA_NAME)
1607 changed = visit_copy (lhs, rhs);
1610 simplified = try_to_simplify (stmt, rhs);
1611 if (simplified && simplified != rhs)
1613 if (dump_file && (dump_flags & TDF_DETAILS))
1615 fprintf (dump_file, "RHS ");
1616 print_generic_expr (dump_file, rhs, 0);
1617 fprintf (dump_file, " simplified to ");
1618 print_generic_expr (dump_file, simplified, 0);
1619 if (TREE_CODE (lhs) == SSA_NAME)
1620 fprintf (dump_file, " has constants %d\n",
1621 VN_INFO (lhs)->has_constants);
1623 fprintf (dump_file, "\n");
1627 /* Setting value numbers to constants will occasionally
1628 screw up phi congruence because constants are not
1629 uniquely associated with a single ssa name that can be
1631 if (simplified && is_gimple_min_invariant (simplified)
1632 && TREE_CODE (lhs) == SSA_NAME
1633 && simplified != rhs)
1635 VN_INFO (lhs)->expr = simplified;
1636 VN_INFO (lhs)->has_constants = true;
1637 changed = set_ssa_val_to (lhs, simplified);
/* Simplified to another SSA name: this is just a copy.  */
1640 else if (simplified && TREE_CODE (simplified) == SSA_NAME
1641 && TREE_CODE (lhs) == SSA_NAME)
1643 changed = visit_copy (lhs, simplified);
1646 else if (simplified)
1648 if (TREE_CODE (lhs) == SSA_NAME)
1650 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
1651 /* We have to unshare the expression or else
1652 valuizing may change the IL stream. */
1653 VN_INFO (lhs)->expr = unshare_expr (simplified);
1657 else if (expr_has_constants (rhs) && TREE_CODE (lhs) == SSA_NAME)
1659 VN_INFO (lhs)->has_constants = true;
1660 VN_INFO (lhs)->expr = unshare_expr (rhs);
1662 else if (TREE_CODE (lhs) == SSA_NAME)
1664 /* We reset expr and constantness here because we may
1665 have been value numbering optimistically, and
1666 iterating. They may become non-constant in this case,
1667 even if they were optimistically constant. */
1669 VN_INFO (lhs)->has_constants = false;
1670 VN_INFO (lhs)->expr = lhs;
/* Now dispatch on the LHS/RHS shape to the per-kind visitors.  */
1673 if (TREE_CODE (lhs) == SSA_NAME
1674 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
1675 changed = defs_to_varying (stmt);
1676 else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
1678 changed = visit_reference_op_store (lhs, rhs, stmt);
1680 else if (TREE_CODE (lhs) == SSA_NAME)
1682 if (is_gimple_min_invariant (rhs))
1684 VN_INFO (lhs)->has_constants = true;
1685 VN_INFO (lhs)->expr = rhs;
1686 changed = set_ssa_val_to (lhs, rhs);
1690 switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
1693 changed = visit_unary_op (lhs, rhs);
1696 changed = visit_binary_op (lhs, rhs);
1698 /* If tcc_vl_expr ever encompasses more than
1699 CALL_EXPR, this will need to be changed. */
/* Only pure/const calls are safe to value number as loads.  */
1701 if (call_expr_flags (rhs) & (ECF_PURE | ECF_CONST))
1702 changed = visit_reference_op_load (lhs, rhs, stmt);
1704 changed = defs_to_varying (stmt);
1706 case tcc_declaration:
1708 changed = visit_reference_op_load (lhs, rhs, stmt);
1710 case tcc_expression:
1711 if (TREE_CODE (rhs) == ADDR_EXPR)
1713 changed = visit_unary_op (lhs, rhs);
1718 changed = defs_to_varying (stmt);
1724 changed = defs_to_varying (stmt);
1731 /* Compare two operands by reverse postorder index */
1734 compare_ops (const void *pa, const void *pb)
1736 const tree opa = *((const tree *)pa);
1737 const tree opb = *((const tree *)pb);
1738 tree opstmta = SSA_NAME_DEF_STMT (opa);
1739 tree opstmtb = SSA_NAME_DEF_STMT (opb);
/* Empty (undefined) def statements sort first.  */
1743 if (IS_EMPTY_STMT (opstmta) && IS_EMPTY_STMT (opstmtb))
1745 else if (IS_EMPTY_STMT (opstmta))
1747 else if (IS_EMPTY_STMT (opstmtb))
1750 bba = bb_for_stmt (opstmta);
1751 bbb = bb_for_stmt (opstmtb);
/* Within the same block: PHIs come before ordinary statements, which
   are ordered by their statement annotation uid.  */
1762 if (TREE_CODE (opstmta) == PHI_NODE && TREE_CODE (opstmtb) == PHI_NODE)
1764 else if (TREE_CODE (opstmta) == PHI_NODE)
1766 else if (TREE_CODE (opstmtb) == PHI_NODE)
1768 return stmt_ann (opstmta)->uid - stmt_ann (opstmtb)->uid;
/* Different blocks: order by the precomputed RPO number of the block.  */
1770 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
1773 /* Sort an array containing members of a strongly connected component
1774 SCC so that the members are ordered by RPO number.
1775 This means that when the sort is complete, iterating through the
1776 array will give you the members in RPO order. */
1779 sort_scc (VEC (tree, heap) *scc)
/* Sort the VEC's backing array in place using compare_ops.  */
1781 qsort (VEC_address (tree, scc),
1782 VEC_length (tree, scc),
1787 /* Process a strongly connected component in the SSA graph. */
1790 process_scc (VEC (tree, heap) *scc)
1792 /* If the SCC has a single member, just visit it. */
/* A singleton SCC cannot change on iteration, so a single visit with
   the valid table suffices.  */
1794 if (VEC_length (tree, scc) == 1)
1796 tree use = VEC_index (tree, scc, 0);
1797 if (!VN_INFO (use)->use_processed)
1804 unsigned int iterations = 0;
1805 bool changed = true;
1807 /* Iterate over the SCC with the optimistic table until it stops
1809 current_info = optimistic_info;
/* Empty all optimistic hashtables and pools before each iteration so
   equivalences from the previous iteration cannot leak into this one
   (only SSA name -> value mappings persist between iterations).  */
1814 htab_empty (optimistic_info->unary);
1815 htab_empty (optimistic_info->binary);
1816 htab_empty (optimistic_info->phis);
1817 htab_empty (optimistic_info->references);
1818 empty_alloc_pool (optimistic_info->unary_op_pool);
1819 empty_alloc_pool (optimistic_info->binary_op_pool);
1820 empty_alloc_pool (optimistic_info->phis_pool);
1821 empty_alloc_pool (optimistic_info->references_pool);
1822 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1823 changed |= visit_use (var);
1826 if (dump_file && (dump_flags & TDF_STATS))
1827 fprintf (dump_file, "Processing SCC required %d iterations\n",
1830 /* Finally, visit the SCC once using the valid table. */
1831 current_info = valid_info;
1832 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1837 /* Depth first search on NAME to discover and process SCC's in the SSA
1839 Execution of this algorithm relies on the fact that the SCC's are
1840 popped off the stack in topological order. */
/* Tarjan-style SCC discovery: assign a DFS number and low-link, push
   NAME on the SCC stack, then recurse into use operands.
   NOTE(review): the function signature line is missing from this
   extraction; body annotated as-is.  */
1850 VN_INFO (name)->dfsnum = next_dfs_num++;
1851 VN_INFO (name)->visited = true;
1852 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
1854 VEC_safe_push (tree, heap, sccstack, name);
1855 VN_INFO (name)->on_sccstack = true;
1856 defstmt = SSA_NAME_DEF_STMT (name);
1858 /* Recursively DFS on our operands, looking for SCC's. */
1859 if (!IS_EMPTY_STMT (defstmt))
1861 FOR_EACH_PHI_OR_STMT_USE (usep, SSA_NAME_DEF_STMT (name), iter,
1864 tree use = USE_FROM_PTR (usep);
1866 /* Since we handle phi nodes, we will sometimes get
1867 invariants in the use expression. */
1868 if (TREE_CODE (use) != SSA_NAME)
1871 if (! (VN_INFO (use)->visited))
/* Tree edge: fold the child's low-link into ours after recursing.  */
1874 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
1875 VN_INFO (use)->low);
/* Back/cross edge to a node still on the stack: use its dfsnum.  */
1877 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
1878 && VN_INFO (use)->on_sccstack)
1880 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
1881 VN_INFO (name)->low);
1886 /* See if we found an SCC. */
/* NAME is an SCC root when its low-link equals its own DFS number.  */
1887 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
1889 VEC (tree, heap) *scc = NULL;
1892 /* Found an SCC, pop the components off the SCC stack and
1896 x = VEC_pop (tree, sccstack);
1898 VN_INFO (x)->on_sccstack = false;
1899 VEC_safe_push (tree, heap, scc, x);
1900 } while (x != name);
/* Multi-member SCCs need RPO sorting (and iteration) before
   processing; see sort_scc/process_scc.  */
1902 if (VEC_length (tree, scc) > 1)
1905 if (dump_file && (dump_flags & TDF_DETAILS))
1906 print_scc (dump_file, scc);
1910 VEC_free (tree, heap, scc);
/* Tail of the PHI-hashtable free function (releases the phiargs
   vector).  NOTE(review): the function header is missing from this
   extraction.  */
1918 VEC_free (tree, heap, phi->phiargs);
1922 /* Free a reference operation structure VP. */
1925 free_reference (void *vp)
/* Hashtable element destructor: release the operands vector owned by
   the vn_reference_s entry.  */
1927 vn_reference_t vr = vp;
1928 VEC_free (vn_reference_op_s, heap, vr->operands);
1931 /* Allocate a value number table. */
1934 allocate_vn_table (vn_tables_t table)
/* Create the four hashtables; only the PHI and reference tables own
   heap data that needs a per-element free function.  */
1936 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
1937 table->unary = htab_create (23, vn_unary_op_hash, vn_unary_op_eq, NULL);
1938 table->binary = htab_create (23, vn_binary_op_hash, vn_binary_op_eq, NULL);
1939 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
/* Matching alloc pools back the entries stored in each hashtable.  */
1942 table->unary_op_pool = create_alloc_pool ("VN unary operations",
1943 sizeof (struct vn_unary_op_s),
1945 table->binary_op_pool = create_alloc_pool ("VN binary operations",
1946 sizeof (struct vn_binary_op_s),
1948 table->phis_pool = create_alloc_pool ("VN phis",
1949 sizeof (struct vn_phi_s),
1951 table->references_pool = create_alloc_pool ("VN references",
1952 sizeof (struct vn_reference_s),
1956 /* Free a value number table. */
1959 free_vn_table (vn_tables_t table)
/* Delete hashtables first (their free functions may touch entries),
   then release the backing alloc pools.  */
1961 htab_delete (table->phis);
1962 htab_delete (table->unary);
1963 htab_delete (table->binary);
1964 htab_delete (table->references);
1965 free_alloc_pool (table->unary_op_pool);
1966 free_alloc_pool (table->binary_op_pool);
1967 free_alloc_pool (table->phis_pool);
1968 free_alloc_pool (table->references_pool);
/* Initializer for SCC value numbering: allocates per-SSA-name aux
   info, computes block RPO numbers, assigns statement uids, and
   creates the valid and optimistic tables.  NOTE(review): the function
   header is missing from this extraction.  */
1976 int *rpo_numbers_temp;
1980 calculate_dominance_info (CDI_DOMINATORS);
1984 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
1985 /* VEC_alloc doesn't actually grow it to the right size, it just
1986 preallocates the space to do so. */
1987 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
1988 shared_lookup_phiargs = NULL;
1989 shared_lookup_vops = NULL;
1990 shared_lookup_references = NULL;
1991 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
1992 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
1993 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
1995 /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
1996 the i'th block in RPO order is bb. We want to map bb's to RPO
1997 numbers, so we need to rearrange this array. */
1998 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
1999 rpo_numbers[rpo_numbers_temp[j]] = j;
2001 free (rpo_numbers_temp);
/* VN_TOP is a unique sentinel tree meaning "value not yet known".  */
2003 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
2005 /* Create the VN_INFO structures, and initialize value numbers to
2007 for (i = 0; i < num_ssa_names; i++)
2009 tree name = ssa_name (i);
2012 VN_INFO_GET (name)->valnum = VN_TOP;
2013 VN_INFO (name)->expr = name;
/* Give every statement a uid; compare_ops uses these to order
   statements within a block.  */
2019 block_stmt_iterator bsi;
2020 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2022 tree stmt = bsi_stmt (bsi);
2023 stmt_ann (stmt)->uid = id++;
2027 /* Create the valid and optimistic value numbering tables. */
2028 valid_info = XCNEW (struct vn_tables_s);
2029 allocate_vn_table (valid_info);
2030 optimistic_info = XCNEW (struct vn_tables_s);
2031 allocate_vn_table (optimistic_info);
/* Allocate a fresh value numbering table for use by PRE and make it
   the current table.  */
2036 switch_to_PRE_table (void)
2038 pre_info = XCNEW (struct vn_tables_s);
2039 allocate_vn_table (pre_info);
2040 current_info = pre_info;
/* Teardown for SCC value numbering: releases shared lookup vectors,
   RPO numbers, per-name aux info, the SCC stack, and all tables.
   NOTE(review): the function header is missing from this extraction.  */
2048 VEC_free (tree, heap, shared_lookup_phiargs);
2049 VEC_free (tree, gc, shared_lookup_vops);
2050 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
2051 XDELETEVEC (rpo_numbers);
2052 for (i = 0; i < num_ssa_names; i++)
2054 tree name = ssa_name (i);
2057 XDELETE (VN_INFO (name));
/* Clear any value-handle SSA_NAME_VALUE left behind so later passes
   do not see stale value handles.  */
2058 if (SSA_NAME_VALUE (name) &&
2059 TREE_CODE (SSA_NAME_VALUE (name)) == VALUE_HANDLE)
2060 SSA_NAME_VALUE (name) = NULL;
2064 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
2065 VEC_free (tree, heap, sccstack);
2066 free_vn_table (valid_info);
2067 XDELETE (valid_info);
2068 free_vn_table (optimistic_info);
2069 XDELETE (optimistic_info);
/* pre_info exists only if switch_to_PRE_table was called.  */
2072 free_vn_table (pre_info);
2084 current_info = valid_info;
2086 for (param = DECL_ARGUMENTS (current_function_decl);
2088 param = TREE_CHAIN (param))
2090 if (gimple_default_def (cfun, param) != NULL)
2092 tree def = gimple_default_def (cfun, param);
2093 SSA_VAL (def) = def;
2097 for (i = num_ssa_names - 1; i > 0; i--)
2099 tree name = ssa_name (i);
2101 && VN_INFO (name)->visited == false
2102 && !has_zero_uses (name))
2106 if (dump_file && (dump_flags & TDF_DETAILS))
2108 fprintf (dump_file, "Value numbers:\n");
2109 for (i = 0; i < num_ssa_names; i++)
2111 tree name = ssa_name (i);
2112 if (name && VN_INFO (name)->visited
2113 && (SSA_VAL (name) != name
2114 || is_gimple_min_invariant (VN_INFO (name)->expr)))
2116 print_generic_expr (dump_file, name, 0);
2117 fprintf (dump_file, " = ");
2118 if (is_gimple_min_invariant (VN_INFO (name)->expr))
2119 print_generic_expr (dump_file, VN_INFO (name)->expr, 0);
2121 print_generic_expr (dump_file, SSA_VAL (name), 0);
2122 fprintf (dump_file, "\n");