1 /* SCC value numbering for trees
2 Copyright (C) 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dan@dberlin.org>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "basic-block.h"
29 #include "diagnostic.h"
30 #include "tree-inline.h"
31 #include "tree-flow.h"
32 #include "tree-gimple.h"
33 #include "tree-dump.h"
37 #include "tree-iterator.h"
39 #include "alloc-pool.h"
40 #include "tree-pass.h"
43 #include "langhooks.h"
46 #include "tree-ssa-propagate.h"
47 #include "tree-ssa-sccvn.h"
49 /* This algorithm is based on the SCC algorithm presented by Keith
50 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
51 (http://citeseer.ist.psu.edu/41805.html). In
52 straight line code, it is equivalent to a regular hash based value
53 numbering that is performed in reverse postorder.
55 For code with cycles, there are two alternatives, both of which
56 require keeping the hashtables separate from the actual list of
57 value numbers for SSA names.
59 1. Iterate value numbering in an RPO walk of the blocks, removing
60 all the entries from the hashtable after each iteration (but
61 keeping the SSA name->value number mapping between iterations).
62 Iterate until it does not change.
64 2. Perform value numbering as part of an SCC walk on the SSA graph,
65 iterating only the cycles in the SSA graph until they do not change
66 (using a separate, optimistic hashtable for value numbering the SCC
The second is not just faster in practice (because most SSA graph
cycles do not involve all the variables in the graph), it also has
some nice properties.
73 One of these nice properties is that when we pop an SCC off the
74 stack, we are guaranteed to have processed all the operands coming from
75 *outside of that SCC*, so we do not need to do anything special to
76 ensure they have value numbers.
78 Another nice property is that the SCC walk is done as part of a DFS
79 of the SSA graph, which makes it easy to perform combining and
80 simplifying operations at the same time.
82 The code below is deliberately written in a way that makes it easy
83 to separate the SCC walk from the other work it does.
85 In order to propagate constants through the code, we track which
86 expressions contain constants, and use those while folding. In
87 theory, we could also track expressions whose value numbers are
88 replaced, in case we end up folding based on expression
91 In order to value number memory, we assign value numbers to vuses.
92 This enables us to note that, for example, stores to the same
93 address of the same value from the same starting memory states are
97 1. We can iterate only the changing portions of the SCC's, but
98 I have not seen an SCC big enough for this to be a win.
99 2. If you differentiate between phi nodes for loops and phi nodes
100 for if-then-else, you can properly consider phi nodes in different
101 blocks for equivalence.
102 3. We could value number vuses in more cases, particularly, whole
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
  /* NOTE(review): the hash-table members of this struct are not
     visible in this excerpt -- confirm against the full file.  */
  /* Allocation pools from which the entries stored in the
     corresponding hash tables are carved.  */
  alloc_pool unary_op_pool;
  alloc_pool binary_op_pool;
  alloc_pool phis_pool;
  alloc_pool references_pool;
/* Binary operations in the hashtable consist of two operands, an
   opcode, and a type.  Result is the value number of the operation,
   and hashcode is stored to avoid having to calculate it
   repeatedly.  */

typedef struct vn_binary_op_s
  /* Tree code of the binary operation.  */
  enum tree_code opcode;
typedef const struct vn_binary_op_s *const_vn_binary_op_t;
/* Unary operations in the hashtable consist of a single operand, an
   opcode, and a type.  Result is the value number of the operation,
   and hashcode is stored to avoid having to calculate it repeatedly.  */

typedef struct vn_unary_op_s
  /* Tree code of the unary operation.  */
  enum tree_code opcode;
typedef const struct vn_unary_op_s *const_vn_unary_op_t;
/* Phi nodes in the hashtable consist of their non-VN_TOP phi
   arguments, and the basic block the phi is in.  Result is the value
   number of the operation, and hashcode is stored to avoid having to
   calculate it repeatedly.  Phi nodes not in the same block are never
   considered equivalent.  */

typedef struct vn_phi_s
  /* Value numbers of the phi arguments, stored in edge order for the
     block (see vn_phi_lookup/vn_phi_insert).  */
  VEC (tree, heap) *phiargs;
typedef const struct vn_phi_s *const_vn_phi_t;
/* Reference operands only exist in reference operations structures.
   They consist of an opcode, type, and some number of operands.  For
   a given opcode, some, all, or none of the operands may be used.
   The operands are there to store the information that makes up the
   portion of the addressing calculation that opcode performs.  */

typedef struct vn_reference_op_struct
  /* Tree code of this piece of the address computation.  */
  enum tree_code opcode;
typedef vn_reference_op_s *vn_reference_op_t;
typedef const vn_reference_op_s *const_vn_reference_op_t;

/* Vector-of-structure support for vn_reference_op_s.  */
DEF_VEC_O(vn_reference_op_s);
DEF_VEC_ALLOC_O(vn_reference_op_s, heap);
/* A reference operation in the hashtable is representation as a
   collection of vuses, representing the memory state at the time of
   the operation, and a collection of operands that make up the
   addressing calculation.  If two vn_reference_t's have the same set
   of operands, they access the same memory location.  We also store
   the resulting value number, and the hashcode.  The vuses are
   always stored in order sorted by ssa name version.  */

typedef struct vn_reference_s
  /* Memory state at the point of the reference, sorted by SSA name
     version (see vuses_to_vec).  */
  VEC (tree, gc) *vuses;
  /* The operands making up the addressing calculation.  */
  VEC (vn_reference_op_s, heap) *operands;
typedef const struct vn_reference_s *const_vn_reference_t;
/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* PRE hashtables storing information about mapping from expressions to
   value handles -- presumably consumed by the PRE pass; confirm
   against the full file.  */

static vn_tables_t pre_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;

/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

/* Shorthand for the current value number of SSA name X.  */
#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  NOTE(review): the declaration of VN_TOP itself is not
   visible in this excerpt.  */

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;

/* Vector-of-pointer support for vn_ssa_aux_t.  */
DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
248 /* Return the value numbering information for a given SSA name. */
253 return VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
254 SSA_NAME_VERSION (name));
257 /* Set the value numbering info for a given SSA name to a given
261 VN_INFO_SET (tree name, vn_ssa_aux_t value)
263 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
264 SSA_NAME_VERSION (name), value);
267 /* Get the value numbering info for a given SSA name, creating it if
268 it does not exist. */
271 VN_INFO_GET (tree name)
273 vn_ssa_aux_t newinfo = XCNEW (struct vn_ssa_aux);
274 if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
275 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
276 SSA_NAME_VERSION (name) + 1);
277 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
278 SSA_NAME_VERSION (name), newinfo);
283 /* Compare two reference operands P1 and P2 for equality. return true if
284 they are equal, and false otherwise. */
287 vn_reference_op_eq (const void *p1, const void *p2)
289 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
290 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
291 return vro1->opcode == vro2->opcode
292 && vro1->type == vro2->type
293 && expressions_equal_p (vro1->op0, vro2->op0)
294 && expressions_equal_p (vro1->op1, vro2->op1)
295 && expressions_equal_p (vro1->op2, vro2->op2);
298 /* Compute the hash for a reference operand VRO1 */
301 vn_reference_op_compute_hash (const vn_reference_op_t vro1)
303 return iterative_hash_expr (vro1->op0, vro1->opcode)
304 + iterative_hash_expr (vro1->op1, vro1->opcode)
305 + iterative_hash_expr (vro1->op2, vro1->opcode);
308 /* Return the hashcode for a given reference operation P1. */
311 vn_reference_hash (const void *p1)
313 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
314 return vr1->hashcode;
317 /* Compute a hash for the reference operation VR1 and return it. */
319 static inline hashval_t
320 vn_reference_compute_hash (const vn_reference_t vr1)
322 hashval_t result = 0;
325 vn_reference_op_t vro;
327 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
328 result += iterative_hash_expr (v, 0);
329 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
330 result += vn_reference_op_compute_hash (vro);
/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

vn_reference_eq (const void *p1, const void *p2)
  vn_reference_op_t vro;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Pointer-identical vectors mean the references are trivially
     equal.  */
  if (vr1->vuses == vr2->vuses
      && vr1->operands == vr2->operands)

  /* Impossible for them to be equivalent if they have different
     numbers of vuses.  */
  if (VEC_length (tree, vr1->vuses) != VEC_length (tree, vr2->vuses))

  /* We require that address operands be canonicalized in a way that
     two memory references will have the same operands if they are
     equivalent, so a length check plus element-wise comparison
     suffices.  */
  if (VEC_length (vn_reference_op_s, vr1->operands)
      != VEC_length (vn_reference_op_s, vr2->operands))

  /* The memory state is more often different than the address of the
     store/load, so check it first.  */
  for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
      if (VEC_index (tree, vr2->vuses, i) != v)

  /* Finally compare the address operands one by one.  */
  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
      if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
/* Place the vuses from STMT into *RESULT.  */

vuses_to_vec (tree stmt, VEC (tree, gc) **result)
  /* Pre-allocate room for exactly the virtual uses on STMT.  */
  *result = VEC_alloc (tree, gc, num_ssa_operands (stmt, SSA_OP_VIRTUAL_USES));

  FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VIRTUAL_USES)
    VEC_quick_push (tree, *result, vuse);

  /* Canonicalize the order (sorted by SSA name version) so vuse
     vectors can be compared element-wise; see vn_reference_eq.  */
  if (VEC_length (tree, *result) > 1)
    sort_vuses (*result);
402 /* Copy the VUSE names in STMT into a vector, and return
406 copy_vuses_from_stmt (tree stmt)
408 VEC (tree, gc) *vuses = NULL;
410 vuses_to_vec (stmt, &vuses);
/* Place the vdefs from STMT into *RESULT.  */

vdefs_to_vec (tree stmt, VEC (tree, gc) **result)
  /* Pre-allocate room for exactly the virtual defs on STMT.  */
  *result = VEC_alloc (tree, gc, num_ssa_operands (stmt, SSA_OP_VIRTUAL_DEFS));

  FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, iter, SSA_OP_VIRTUAL_DEFS)
    VEC_quick_push (tree, *result, vdef);

  /* Sorted the same way as vuses so vdef and vuse vectors are
     directly comparable.  */
  if (VEC_length (tree, *result) > 1)
    sort_vuses (*result);
435 /* Copy the names of vdef results in STMT into a vector, and return
438 static VEC (tree, gc) *
439 copy_vdefs_from_stmt (tree stmt)
441 VEC (tree, gc) *vdefs = NULL;
443 vdefs_to_vec (stmt, &vdefs);
448 /* Place for shared_v{uses/defs}_from_stmt to shove vuses/vdefs. */
449 static VEC (tree, gc) *shared_lookup_vops;
451 /* Copy the virtual uses from STMT into SHARED_LOOKUP_VOPS.
452 This function will overwrite the current SHARED_LOOKUP_VOPS
456 shared_vuses_from_stmt (tree stmt)
458 VEC_truncate (tree, shared_lookup_vops, 0);
459 vuses_to_vec (stmt, &shared_lookup_vops);
461 return shared_lookup_vops;
/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
  /* Calls are different from all other reference operations.  */
  if (TREE_CODE (ref) == CALL_EXPR)
      vn_reference_op_s temp;
      call_expr_arg_iterator iter;

      /* Copy the call_expr opcode, type, function being called, and
	 arguments into the result vector.  */
      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = CALL_EXPR;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      /* Prefer the resolved fndecl; otherwise fall back to the raw
	 callee expression.  */
      callfn = get_callee_fndecl (ref);
      callfn = CALL_EXPR_FN (ref);
      temp.type = TREE_TYPE (callfn);
      temp.opcode = TREE_CODE (callfn);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      /* One vn_reference_op_s per call argument.  */
      FOR_EACH_CALL_EXPR_ARG (callarg, iter, ref)
	  memset (&temp, 0, sizeof (temp));
	  temp.type = TREE_TYPE (callarg);
	  temp.opcode = TREE_CODE (callarg);
	  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* For non-calls, store the information that makes up the address,
     one vn_reference_op_s per level of the reference chain.  */
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);

	case ALIGN_INDIRECT_REF:
	case MISALIGNED_INDIRECT_REF:
	  /* The only operand is the address, which gets its own
	     vn_reference_op_s structure.  */
	  /* Record bits and position.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  /* Record field as operand.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	case ARRAY_RANGE_REF:
	  /* Record index as operand.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 3);
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (IE they require an
	     operand), so we don't have to put anything
	     for op* as it will be handled by the iteration  */
	case VIEW_CONVERT_EXPR:
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      /* Walk down into the operand to process the rest of the
	 reference chain.  */
      if (REFERENCE_CLASS_P (ref) || TREE_CODE (ref) == ADDR_EXPR)
	ref = TREE_OPERAND (ref, 0);
573 /* Create a vector of vn_reference_op_s structures from REF, a
574 REFERENCE_CLASS_P tree. The vector is not shared. */
576 static VEC(vn_reference_op_s, heap) *
577 create_reference_ops_from_ref (tree ref)
579 VEC (vn_reference_op_s, heap) *result = NULL;
581 copy_reference_ops_from_ref (ref, &result);
585 static VEC(vn_reference_op_s, heap) *shared_lookup_references;
587 /* Create a vector of vn_reference_op_s structures from REF, a
588 REFERENCE_CLASS_P tree. The vector is shared among all callers of
591 static VEC(vn_reference_op_s, heap) *
592 shared_reference_ops_from_ref (tree ref)
596 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
597 copy_reference_ops_from_ref (ref, &shared_lookup_references);
598 return shared_lookup_references;
602 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
603 structures into their value numbers. This is done in-place, and
604 the vector passed in is returned. */
606 static VEC (vn_reference_op_s, heap) *
607 valueize_refs (VEC (vn_reference_op_s, heap) *orig)
609 vn_reference_op_t vro;
612 for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
614 if (vro->opcode == SSA_NAME
615 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
616 vro->op0 = SSA_VAL (vro->op0);
/* Transform any SSA_NAME's in ORIG, a vector of vuse trees, into
   their value numbers.  This is done in-place, and the vector passed
   in is returned.  */

static VEC (tree, gc) *
valueize_vuses (VEC (tree, gc) *orig)
  bool made_replacement = false;

  for (i = 0; VEC_iterate (tree, orig, i, vuse); i++)
      /* Replace a vuse by its value number only when they differ, and
	 remember that we changed something.  */
      if (vuse != SSA_VAL (vuse))
	  made_replacement = true;
	  VEC_replace (tree, orig, i, SSA_VAL (vuse));

  /* Replacements may have destroyed the sorted-by-version order, so
     re-sort when anything changed and there is more than one
     element.  */
  if (made_replacement && VEC_length (tree, orig) > 1)
/* Lookup OP in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

vn_reference_lookup (tree op, VEC (tree, gc) *vuses)
  struct vn_reference_s vr1;

  /* Canonicalize vuses and operands to value numbers before hashing
     so that equal references hash and compare identically.  */
  vr1.vuses = valueize_vuses (vuses);
  vr1.operands = valueize_refs (shared_reference_ops_from_ref (op));
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  slot = htab_find_slot_with_hash (current_info->references, &vr1, vr1.hashcode,
  /* During optimistic iteration, fall back to the valid table for
     entries not present in the optimistic one.  */
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, &vr1, vr1.hashcode,
  return ((vn_reference_t)*slot)->result;
/* Insert OP into the current hash table with a value number of
   RESULT.  */

vn_reference_insert (tree op, tree result, VEC (tree, gc) *vuses)
  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);

  vr1->vuses = valueize_vuses (vuses);
  vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
  vr1->hashcode = vn_reference_compute_hash (vr1);
  /* Record the value number of RESULT, not RESULT itself, when it is
     an SSA name.  */
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */
704 /* Return the stored hashcode for a unary operation. */
707 vn_unary_op_hash (const void *p1)
709 const_vn_unary_op_t const vuo1 = (const_vn_unary_op_t) p1;
710 return vuo1->hashcode;
713 /* Hash a unary operation P1 and return the result. */
715 static inline hashval_t
716 vn_unary_op_compute_hash (const vn_unary_op_t vuo1)
718 return iterative_hash_expr (vuo1->op0, vuo1->opcode);
721 /* Return true if P1 and P2, two unary operations, are equivalent. */
724 vn_unary_op_eq (const void *p1, const void *p2)
726 const_vn_unary_op_t const vuo1 = (const_vn_unary_op_t) p1;
727 const_vn_unary_op_t const vuo2 = (const_vn_unary_op_t) p2;
728 return vuo1->opcode == vuo2->opcode
729 && vuo1->type == vuo2->type
730 && expressions_equal_p (vuo1->op0, vuo2->op0);
/* Lookup OP in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

vn_unary_op_lookup (tree op)
  struct vn_unary_op_s vuo1;

  vuo1.opcode = TREE_CODE (op);
  vuo1.type = TREE_TYPE (op);
  vuo1.op0 = TREE_OPERAND (op, 0);

  /* Canonicalize the operand to its value number before hashing.  */
  if (TREE_CODE (vuo1.op0) == SSA_NAME)
    vuo1.op0 = SSA_VAL (vuo1.op0);

  vuo1.hashcode = vn_unary_op_compute_hash (&vuo1);
  slot = htab_find_slot_with_hash (current_info->unary, &vuo1, vuo1.hashcode,
  /* During optimistic iteration, also consult the valid table.  */
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->unary, &vuo1, vuo1.hashcode,
  return ((vn_unary_op_t)*slot)->result;
/* Insert OP into the current hash table with a value number of
   RESULT.  */

vn_unary_op_insert (tree op, tree result)
  vn_unary_op_t vuo1 = (vn_unary_op_t) pool_alloc (current_info->unary_op_pool);

  vuo1->opcode = TREE_CODE (op);
  vuo1->type = TREE_TYPE (op);
  vuo1->op0 = TREE_OPERAND (op, 0);
  vuo1->result = result;

  /* Canonicalize the operand to its value number, matching what
     vn_unary_op_lookup does.  */
  if (TREE_CODE (vuo1->op0) == SSA_NAME)
    vuo1->op0 = SSA_VAL (vuo1->op0);

  vuo1->hashcode = vn_unary_op_compute_hash (vuo1);
  slot = htab_find_slot_with_hash (current_info->unary, vuo1, vuo1->hashcode,
785 /* Compute and return the hash value for binary operation VBO1. */
787 static inline hashval_t
788 vn_binary_op_compute_hash (const vn_binary_op_t vbo1)
790 return iterative_hash_expr (vbo1->op0, vbo1->opcode)
791 + iterative_hash_expr (vbo1->op1, vbo1->opcode);
794 /* Return the computed hashcode for binary operation P1. */
797 vn_binary_op_hash (const void *p1)
799 const_vn_binary_op_t const vbo1 = (const_vn_binary_op_t) p1;
800 return vbo1->hashcode;
803 /* Compare binary operations P1 and P2 and return true if they are
807 vn_binary_op_eq (const void *p1, const void *p2)
809 const_vn_binary_op_t const vbo1 = (const_vn_binary_op_t) p1;
810 const_vn_binary_op_t const vbo2 = (const_vn_binary_op_t) p2;
811 return vbo1->opcode == vbo2->opcode
812 && vbo1->type == vbo2->type
813 && expressions_equal_p (vbo1->op0, vbo2->op0)
814 && expressions_equal_p (vbo1->op1, vbo2->op1);
/* Lookup OP in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

vn_binary_op_lookup (tree op)
  struct vn_binary_op_s vbo1;

  vbo1.opcode = TREE_CODE (op);
  vbo1.type = TREE_TYPE (op);
  vbo1.op0 = TREE_OPERAND (op, 0);
  vbo1.op1 = TREE_OPERAND (op, 1);

  /* Canonicalize both operands to their value numbers.  */
  if (TREE_CODE (vbo1.op0) == SSA_NAME)
    vbo1.op0 = SSA_VAL (vbo1.op0);
  if (TREE_CODE (vbo1.op1) == SSA_NAME)
    vbo1.op1 = SSA_VAL (vbo1.op1);

  /* Put commutative operands into a canonical order so that
     a OP b and b OP a hash and compare identically.  */
  if (tree_swap_operands_p (vbo1.op0, vbo1.op1, false)
      && commutative_tree_code (vbo1.opcode))
      tree temp = vbo1.op0;

  vbo1.hashcode = vn_binary_op_compute_hash (&vbo1);
  slot = htab_find_slot_with_hash (current_info->binary, &vbo1, vbo1.hashcode,
  /* During optimistic iteration, also consult the valid table.  */
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->binary, &vbo1, vbo1.hashcode,
  return ((vn_binary_op_t)*slot)->result;
/* Insert OP into the current hash table with a value number of
   RESULT.  */

vn_binary_op_insert (tree op, tree result)
  vbo1 = (vn_binary_op_t) pool_alloc (current_info->binary_op_pool);

  vbo1->opcode = TREE_CODE (op);
  vbo1->type = TREE_TYPE (op);
  vbo1->op0 = TREE_OPERAND (op, 0);
  vbo1->op1 = TREE_OPERAND (op, 1);
  vbo1->result = result;

  /* Canonicalize both operands to their value numbers, matching
     vn_binary_op_lookup.  */
  if (TREE_CODE (vbo1->op0) == SSA_NAME)
    vbo1->op0 = SSA_VAL (vbo1->op0);
  if (TREE_CODE (vbo1->op1) == SSA_NAME)
    vbo1->op1 = SSA_VAL (vbo1->op1);

  /* Canonical order for commutative operations.  */
  if (tree_swap_operands_p (vbo1->op0, vbo1->op1, false)
      && commutative_tree_code (vbo1->opcode))
      tree temp = vbo1->op0;
      vbo1->op0 = vbo1->op1;

  vbo1->hashcode = vn_binary_op_compute_hash (vbo1);
  slot = htab_find_slot_with_hash (current_info->binary, vbo1, vbo1->hashcode,
892 /* Compute a hashcode for PHI operation VP1 and return it. */
894 static inline hashval_t
895 vn_phi_compute_hash (vn_phi_t vp1)
897 hashval_t result = 0;
901 result = vp1->block->index;
903 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
905 if (phi1op == VN_TOP)
907 result += iterative_hash_expr (phi1op, result);
913 /* Return the computed hashcode for phi operation P1. */
916 vn_phi_hash (const void *p1)
918 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
919 return vp1->hashcode;
/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

vn_phi_eq (const void *p1, const void *p2)
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  const_vn_phi_t const vp2 = (const_vn_phi_t) p2;

  /* Phis in different blocks are never considered equivalent.  */
  if (vp1->block == vp2->block)

      /* Any phi in the same block will have it's arguments in the
	 same edge order, because of how we store phi nodes.  */
      for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
	  tree phi2op = VEC_index (tree, vp2->phiargs, i);
	  /* VN_TOP is compatible with anything; skip such pairs.  */
	  if (phi1op == VN_TOP || phi2op == VN_TOP)
	  if (!expressions_equal_p (phi1op, phi2op))
/* Shared scratch vector for vn_phi_lookup's canonicalized phi
   arguments.  */
static VEC(tree, heap) *shared_lookup_phiargs;

/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

vn_phi_lookup (tree phi)
  VEC_truncate (tree, shared_lookup_phiargs, 0);

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
  vp1.phiargs = shared_lookup_phiargs;
  vp1.block = bb_for_stmt (phi);
  vp1.hashcode = vn_phi_compute_hash (&vp1);
  slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
  /* During optimistic iteration, also consult the valid table.  */
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
  return ((vn_phi_t)*slot)->result;
/* Insert PHI into the current hash table with a value number of
   RESULT.  */

vn_phi_insert (tree phi, tree result)
  vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
  VEC (tree, heap) *args = NULL;

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, args, def);
  vp1->phiargs = args;
  vp1->block = bb_for_stmt (phi);
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,

  /* Because we iterate over phi operations more than once, it's
     possible the slot might already exist here, hence no assert.  */
1017 /* Print set of components in strongly connected component SCC to OUT. */
1020 print_scc (FILE *out, VEC (tree, heap) *scc)
1025 fprintf (out, "SCC consists of: ");
1026 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1028 print_generic_expr (out, var, 0);
1031 fprintf (out, "\n");
/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

set_ssa_val_to (tree from, tree to)
      && TREE_CODE (to) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))

  /* The only thing we allow as value numbers are VN_TOP, ssa_names
     and invariants.  So assert that here.  */
  gcc_assert (to != NULL_TREE
	      || TREE_CODE (to) == SSA_NAME
	      || is_gimple_min_invariant (to)));

  if (dump_file && (dump_flags & TDF_DETAILS))
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from, 0);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to, 0);
      fprintf (dump_file, "\n");

  currval = SSA_VAL (from);

  /* Only record (and report) a change when the new value really
     differs from the current one, both by pointer identity and by
     operand_equal_p.  */
  if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
      SSA_VAL (from) = to;
1073 /* Set all definitions in STMT to value number to themselves.
1074 Return true if a value number changed. */
1077 defs_to_varying (tree stmt)
1079 bool changed = false;
1083 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
1085 tree def = DEF_FROM_PTR (defp);
1087 VN_INFO (def)->use_processed = true;
1088 changed |= set_ssa_val_to (def, def);
1093 /* Visit a copy between LHS and RHS, return true if the value number
1097 visit_copy (tree lhs, tree rhs)
1100 /* Follow chains of copies to their destination. */
1101 while (SSA_VAL (rhs) != rhs && TREE_CODE (SSA_VAL (rhs)) == SSA_NAME)
1102 rhs = SSA_VAL (rhs);
1104 /* The copy may have a more interesting constant filled expression
1105 (we don't, since we know our RHS is just an SSA name). */
1106 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
1107 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
1109 return set_ssa_val_to (lhs, rhs);
1112 /* Visit a unary operator RHS, value number it, and return true if the
1113 value number of LHS has changed as a result. */
1116 visit_unary_op (tree lhs, tree op)
1118 bool changed = false;
1119 tree result = vn_unary_op_lookup (op);
1123 changed = set_ssa_val_to (lhs, result);
1127 changed = set_ssa_val_to (lhs, lhs);
1128 vn_unary_op_insert (op, lhs);
1134 /* Visit a binary operator RHS, value number it, and return true if the
1135 value number of LHS has changed as a result. */
1138 visit_binary_op (tree lhs, tree op)
1140 bool changed = false;
1141 tree result = vn_binary_op_lookup (op);
1145 changed = set_ssa_val_to (lhs, result);
1149 changed = set_ssa_val_to (lhs, lhs);
1150 vn_binary_op_insert (op, lhs);
1156 /* Visit a load from a reference operator RHS, part of STMT, value number it,
1157 and return true if the value number of the LHS has changed as a result. */
1160 visit_reference_op_load (tree lhs, tree op, tree stmt)
1162 bool changed = false;
1163 tree result = vn_reference_lookup (op, shared_vuses_from_stmt (stmt));
1167 changed = set_ssa_val_to (lhs, result);
1171 changed = set_ssa_val_to (lhs, lhs);
1172 vn_reference_insert (op, lhs, copy_vuses_from_stmt (stmt));
/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

visit_reference_op_store (tree lhs, tree op, tree stmt)
  bool changed = false;
  bool resultsame = false;

  /* First we want to lookup using the *vuses* from the store and see
     if the last store to this location with the same address had the
     same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store is the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     the store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */

  result = vn_reference_lookup (lhs, shared_vuses_from_stmt (stmt));

      if (TREE_CODE (result) == SSA_NAME)
	result = SSA_VAL (result);
      resultsame = expressions_equal_p (result, op);

  if (!result || !resultsame)
      VEC(tree, gc) *vdefs = copy_vdefs_from_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	  fprintf (dump_file, "No store match\n");
	  fprintf (dump_file, "Value numbering store ");
	  print_generic_expr (dump_file, lhs, 0);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, op, 0);
	  fprintf (dump_file, "\n");
      /* Have to set value numbers before insert, since insert is
	 going to valueize the references in-place.  */
      for (i = 0; VEC_iterate (tree, vdefs, i, vdef); i++)
	  VN_INFO (vdef)->use_processed = true;
	  changed |= set_ssa_val_to (vdef, vdef);

      /* Record the store in the table, keyed by its vdefs.  */
      vn_reference_insert (lhs, op, vdefs);
      /* We had a match, so value number the vdefs to have the value
	 number of the vuses they came from.  */
      ssa_op_iter op_iter;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Store matched earlier value,"
		 "value numbering store vdefs to matching vuses.\n");

      FOR_EACH_SSA_VDEF_OPERAND (var, vv, stmt, op_iter)
	  tree def = DEF_FROM_PTR (var);

	  /* Uh, if the vuse is a multiuse, we can't really do much
	     here, sadly, since we don't know which value number of
	     which vuse to use.  */
	  if (VUSE_VECT_NUM_ELEM (*vv) != 1)
	  use = VUSE_ELEMENT_VAR (*vv, 0);

	  VN_INFO (def)->use_processed = true;
	  changed |= set_ssa_val_to (def, SSA_VAL (use));
/* Visit and value number PHI, return true if the value number
   changed.  */

visit_phi (tree phi)
  bool changed = false;
  tree sameval = VN_TOP;
  bool allsame = true;

  /* TODO: We could check for this in init_sccvn, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
      tree def = PHI_ARG_DEF (phi, i);

      if (TREE_CODE (def) == SSA_NAME)
	def = SSA_VAL (def);
      /* The first non-TOP argument seeds SAMEVAL; later arguments
	 must match it.  */
      if (sameval == VN_TOP)
      if (!expressions_equal_p (def, sameval))

  /* If all value numbered to the same value, the phi node has that
     value.  */
      if (is_gimple_min_invariant (sameval))
	  VN_INFO (PHI_RESULT (phi))->has_constants = true;
	  VN_INFO (PHI_RESULT (phi))->expr = sameval;
	  VN_INFO (PHI_RESULT (phi))->has_constants = false;
	  VN_INFO (PHI_RESULT (phi))->expr = sameval;

      if (TREE_CODE (sameval) == SSA_NAME)
	return visit_copy (PHI_RESULT (phi), sameval);

      return set_ssa_val_to (PHI_RESULT (phi), sameval);

  /* Otherwise, see if it is equivalent to a phi node in this block.  */
  result = vn_phi_lookup (phi);
      if (TREE_CODE (result) == SSA_NAME)
	changed = visit_copy (PHI_RESULT (phi), result);
	changed = set_ssa_val_to (PHI_RESULT (phi), result);
      /* No equivalent phi found: this phi defines a fresh value
	 number and is entered into the table.  */
      vn_phi_insert (phi, PHI_RESULT (phi));
      VN_INFO (PHI_RESULT (phi))->has_constants = false;
      VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
      changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
/* Return true if EXPR contains invariant operands.  Unary expressions
   check operand 0; binary expressions check either operand.  Reference
   ops are deliberately not searched (see comment below); the final case
   tests EXPR itself.  Some switch labels are elided in this listing.  */
1354 /* Return true if EXPR contains constants. */
1357 expr_has_constants (tree expr)
1359 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
1362 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
1365 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
1366 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
1367 /* Constants inside reference ops are rarely interesting, but
1368 it can take a lot of looking to find them. */
1370 case tcc_declaration:
1373 return is_gimple_min_invariant (expr);
/* Replace each SSA_NAME operand of EXPR with its current value number,
   in place, skipping operands whose value is still VN_TOP (unvisited).
   Handles unary (operand 0) and binary (operands 0 and 1) classes; the
   case labels are elided in this listing.  */
1378 /* Replace SSA_NAMES in expr with their value numbers, and return the
1380 This is performed in place. */
1383 valueize_expr (tree expr)
1385 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
1388 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
1389 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
1390 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
1393 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
1394 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
1395 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
1396 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
1397 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
1398 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
/* Try to constant-fold the binary expression RHS of STMT after
   substituting value numbers / constant expressions for its SSA-name
   operands.  Returns the folded result only if it is a valid gimple
   expression (several lines, including the returns, are elided here).  */
1406 /* Simplify the binary expression RHS, and return the result if
1410 simplify_binary_expression (tree stmt, tree rhs)
1412 tree result = NULL_TREE;
1413 tree op0 = TREE_OPERAND (rhs, 0);
1414 tree op1 = TREE_OPERAND (rhs, 1);
1416 /* This will not catch every single case we could combine, but will
1417 catch those with constants. The goal here is to simultaneously
1418 combine constants between expressions, but avoid infinite
1419 expansion of expressions during simplification. */
1420 if (TREE_CODE (op0) == SSA_NAME)
1422 if (VN_INFO (op0)->has_constants)
1423 op0 = valueize_expr (VN_INFO (op0)->expr)
1424 op0 = valueize_expr (VN_INFO (op0)->expr);
1425 op0 = SSA_VAL (op0);
1428 if (TREE_CODE (op1) == SSA_NAME)
1430 if (VN_INFO (op1)->has_constants)
1431 op1 = valueize_expr (VN_INFO (op1)->expr);
1432 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
1433 op1 = SSA_VAL (op1);
1436 /* Avoid folding if nothing changed. */
1437 if (op0 == TREE_OPERAND (rhs, 0)
1438 && op1 == TREE_OPERAND (rhs, 1))
/* Overflow warnings are deferred so we only warn if the fold result
   is actually kept.  */
1441 fold_defer_overflow_warnings ();
1443 result = fold_binary (TREE_CODE (rhs), TREE_TYPE (rhs), op0, op1);
1445 fold_undefer_overflow_warnings (result && valid_gimple_expression_p (result),
1448 /* Make sure result is not a complex expression consisting
1449 of operators of operators (IE (a + b) + (a + c))
1450 Otherwise, we will end up with unbounded expressions if
1451 fold does anything at all. */
1452 if (result && valid_gimple_expression_p (result))
/* Try to constant-fold the unary expression RHS after substituting a
   value for its single SSA-name operand.  Conversion-like codes
   (NOP/CONVERT/REALPART/IMAGPART) additionally feed valueized
   sub-expressions to fold for tree combining.  Returns the result only
   if it is a valid gimple expression (some lines elided here).  */
1458 /* Simplify the unary expression RHS, and return the result if
1462 simplify_unary_expression (tree rhs)
1464 tree result = NULL_TREE;
1465 tree op0 = TREE_OPERAND (rhs, 0);
/* Only SSA-name operands can have recorded values to substitute.  */
1467 if (TREE_CODE (op0) != SSA_NAME)
1470 if (VN_INFO (op0)->has_constants)
1471 op0 = valueize_expr (VN_INFO (op0)->expr);
1472 else if (TREE_CODE (rhs) == NOP_EXPR
1473 || TREE_CODE (rhs) == CONVERT_EXPR
1474 || TREE_CODE (rhs) == REALPART_EXPR
1475 || TREE_CODE (rhs) == IMAGPART_EXPR)
1477 /* We want to do tree-combining on conversion-like expressions.
1478 Make sure we feed only SSA_NAMEs or constants to fold though. */
1479 tree tem = valueize_expr (VN_INFO (op0)->expr);
1480 if (UNARY_CLASS_P (tem)
1481 || BINARY_CLASS_P (tem)
1482 || TREE_CODE (tem) == SSA_NAME
1483 || is_gimple_min_invariant (tem))
1487 /* Avoid folding if nothing changed, but remember the expression. */
1488 if (op0 == TREE_OPERAND (rhs, 0))
1491 result = fold_unary (TREE_CODE (rhs), TREE_TYPE (rhs), op0);
1494 STRIP_USELESS_TYPE_CONVERSION (result);
1495 if (valid_gimple_expression_p (result))
/* Try to simplify RHS of STMT using recorded equivalences and constant
   folding.  Dispatches on the tree code class: SSA names yield their
   constant value or constant-bearing expression; read-only static
   declarations yield DECL_INITIAL; references go through the reference
   hashtable; unary/binary codes go to the simplify_* helpers.  Several
   case labels and returns are elided in this listing.  */
1502 /* Try to simplify RHS using equivalences and constant folding. */
1505 try_to_simplify (tree stmt, tree rhs)
1507 if (TREE_CODE (rhs) == SSA_NAME)
1509 if (is_gimple_min_invariant (SSA_VAL (rhs)))
1510 return SSA_VAL (rhs);
1511 else if (VN_INFO (rhs)->has_constants)
1512 return VN_INFO (rhs)->expr;
1516 switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
1518 /* For references, see if we find a result for the lookup,
1519 and use it if we do. */
1520 case tcc_declaration:
1521 /* Pull out any truly constant values. */
1522 if (TREE_READONLY (rhs)
1523 && TREE_STATIC (rhs)
1524 && DECL_INITIAL (rhs)
1525 && valid_gimple_expression_p (DECL_INITIAL (rhs)))
1526 return DECL_INITIAL (rhs);
1531 tree result = vn_reference_lookup (rhs,
1532 shared_vuses_from_stmt (stmt));
1536 /* Fallthrough for some codes. */
1537 if (!(TREE_CODE (rhs) == REALPART_EXPR
1538 || TREE_CODE (rhs) == IMAGPART_EXPR))
1540 /* We could do a little more with unary ops, if they expand
1541 into binary ops, but it's debatable whether it is worth it. */
1543 return simplify_unary_expression (rhs);
1545 case tcc_comparison:
1547 return simplify_binary_expression (stmt, rhs);
/* Visit and value number USE (an SSA name): the central dispatcher of
   the SCC walk.  Handles, in order: uninitialized uses, PHI nodes,
   non-assignments / volatile statements (-> varying), SSA-to-SSA
   copies, simplification via try_to_simplify, and finally dispatch on
   the RHS code class (unary, binary, calls, references, ADDR_EXPR).
   Returns true if the value number changed.  Many interior lines
   (braces, breaks, returns) are elided in this listing.  */
1556 /* Visit and value number USE, return true if the value number
1560 visit_use (tree use)
1562 bool changed = false;
1563 tree stmt = SSA_NAME_DEF_STMT (use);
1566 VN_INFO (use)->use_processed = true;
1568 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
1569 if (dump_file && (dump_flags & TDF_DETAILS))
1571 fprintf (dump_file, "Value numbering ");
1572 print_generic_expr (dump_file, use, 0);
1573 fprintf (dump_file, " stmt = ");
1574 print_generic_stmt (dump_file, stmt, 0);
1577 /* RETURN_EXPR may have an embedded MODIFY_STMT. */
1578 if (TREE_CODE (stmt) == RETURN_EXPR
1579 && TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT)
1580 stmt = TREE_OPERAND (stmt, 0);
1582 ann = stmt_ann (stmt);
1584 /* Handle uninitialized uses. */
1585 if (IS_EMPTY_STMT (stmt))
1587 changed = set_ssa_val_to (use, use);
1591 if (TREE_CODE (stmt) == PHI_NODE)
1593 changed = visit_phi (stmt);
/* Anything that is not a plain assignment, or has volatile operands,
   cannot be value numbered; force its defs to varying.  */
1595 else if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
1596 || (ann && ann->has_volatile_ops))
1598 changed = defs_to_varying (stmt);
1602 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
1603 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
1606 STRIP_USELESS_TYPE_CONVERSION (rhs);
1608 /* Shortcut for copies. Simplifying copies is pointless,
1609 since we copy the expression and value they represent. */
1610 if (TREE_CODE (rhs) == SSA_NAME && TREE_CODE (lhs) == SSA_NAME)
1612 changed = visit_copy (lhs, rhs);
1615 simplified = try_to_simplify (stmt, rhs);
1616 if (simplified && simplified != rhs)
1618 if (dump_file && (dump_flags & TDF_DETAILS))
1620 fprintf (dump_file, "RHS ");
1621 print_generic_expr (dump_file, rhs, 0);
1622 fprintf (dump_file, " simplified to ");
1623 print_generic_expr (dump_file, simplified, 0);
1624 if (TREE_CODE (lhs) == SSA_NAME)
1625 fprintf (dump_file, " has constants %d\n",
1626 VN_INFO (lhs)->has_constants);
1628 fprintf (dump_file, "\n");
1632 /* Setting value numbers to constants will occasionally
1633 screw up phi congruence because constants are not
1634 uniquely associated with a single ssa name that can be
1636 if (simplified && is_gimple_min_invariant (simplified)
1637 && TREE_CODE (lhs) == SSA_NAME
1638 && simplified != rhs)
1640 VN_INFO (lhs)->expr = simplified;
1641 VN_INFO (lhs)->has_constants = true;
1642 changed = set_ssa_val_to (lhs, simplified);
1645 else if (simplified && TREE_CODE (simplified) == SSA_NAME
1646 && TREE_CODE (lhs) == SSA_NAME)
1648 changed = visit_copy (lhs, simplified);
1651 else if (simplified)
1653 if (TREE_CODE (lhs) == SSA_NAME)
1655 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
1656 /* We have to unshare the expression or else
1657 valueizing may change the IL stream. */
1658 VN_INFO (lhs)->expr = unshare_expr (simplified);
1662 else if (expr_has_constants (rhs) && TREE_CODE (lhs) == SSA_NAME)
1664 VN_INFO (lhs)->has_constants = true;
1665 VN_INFO (lhs)->expr = unshare_expr (rhs);
1667 else if (TREE_CODE (lhs) == SSA_NAME)
1669 /* We reset expr and constantness here because we may
1670 have been value numbering optimistically, and
1671 iterating. They may become non-constant in this case,
1672 even if they were optimistically constant. */
1674 VN_INFO (lhs)->has_constants = false;
1675 VN_INFO (lhs)->expr = lhs;
1678 if (TREE_CODE (lhs) == SSA_NAME
1679 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
1680 changed = defs_to_varying (stmt);
1681 else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
1683 changed = visit_reference_op_store (lhs, rhs, stmt);
1685 else if (TREE_CODE (lhs) == SSA_NAME)
1687 if (is_gimple_min_invariant (rhs))
1689 VN_INFO (lhs)->has_constants = true;
1690 VN_INFO (lhs)->expr = rhs;
1691 changed = set_ssa_val_to (lhs, rhs);
1695 switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
1698 changed = visit_unary_op (lhs, rhs);
1701 changed = visit_binary_op (lhs, rhs);
1703 /* If tcc_vl_expr ever encompasses more than
1704 CALL_EXPR, this will need to be changed. */
/* Only pure/const calls are safe to treat as loads.  */
1706 if (call_expr_flags (rhs) & (ECF_PURE | ECF_CONST))
1707 changed = visit_reference_op_load (lhs, rhs, stmt);
1709 changed = defs_to_varying (stmt);
1711 case tcc_declaration:
1713 changed = visit_reference_op_load (lhs, rhs, stmt);
1715 case tcc_expression:
1716 if (TREE_CODE (rhs) == ADDR_EXPR)
1718 changed = visit_unary_op (lhs, rhs);
1723 changed = defs_to_varying (stmt);
1729 changed = defs_to_varying (stmt);
/* qsort comparator ordering two SSA names by the reverse postorder
   index of their defining statements.  Empty (nop) definitions sort
   first; within the same basic block PHI nodes sort before real
   statements, which are ordered by statement uid; across blocks the
   blocks' RPO numbers decide.  Some early-return lines are elided.  */
1736 /* Compare two operands by reverse postorder index */
1739 compare_ops (const void *pa, const void *pb)
1741 const tree opa = *((const tree *)pa);
1742 const tree opb = *((const tree *)pb);
1743 tree opstmta = SSA_NAME_DEF_STMT (opa);
1744 tree opstmtb = SSA_NAME_DEF_STMT (opb);
1748 if (IS_EMPTY_STMT (opstmta) && IS_EMPTY_STMT (opstmtb))
1750 else if (IS_EMPTY_STMT (opstmta))
1752 else if (IS_EMPTY_STMT (opstmtb))
1755 bba = bb_for_stmt (opstmta);
1756 bbb = bb_for_stmt (opstmtb);
1767 if (TREE_CODE (opstmta) == PHI_NODE && TREE_CODE (opstmtb) == PHI_NODE)
1769 else if (TREE_CODE (opstmta) == PHI_NODE)
1771 else if (TREE_CODE (opstmtb) == PHI_NODE)
1773 return stmt_ann (opstmta)->uid - stmt_ann (opstmtb)->uid;
1775 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
/* Sort the members of SCC into RPO order using compare_ops above
   (the comparator argument line is elided in this listing).  */
1778 /* Sort an array containing members of a strongly connected component
1779 SCC so that the members are ordered by RPO number.
1780 This means that when the sort is complete, iterating through the
1781 array will give you the members in RPO order. */
1784 sort_scc (VEC (tree, heap) *scc)
1786 qsort (VEC_address (tree, scc),
1787 VEC_length (tree, scc),
/* Value number one strongly connected component.  A singleton SCC is
   visited once directly.  A cycle is iterated to a fixed point with
   the optimistic tables, which are emptied before every iteration
   (the SSA name -> value mapping survives between iterations), and
   then visited one final time with the valid table.  Loop braces and
   the trailing visit_use call are elided in this listing.  */
1792 /* Process a strongly connected component in the SSA graph. */
1795 process_scc (VEC (tree, heap) *scc)
1797 /* If the SCC has a single member, just visit it. */
1799 if (VEC_length (tree, scc) == 1)
1801 tree use = VEC_index (tree, scc, 0);
1802 if (!VN_INFO (use)->use_processed)
1809 unsigned int iterations = 0;
1810 bool changed = true;
1812 /* Iterate over the SCC with the optimistic table until it stops
1814 current_info = optimistic_info;
/* Reset all optimistic hashtables and pools for this iteration.  */
1819 htab_empty (optimistic_info->unary);
1820 htab_empty (optimistic_info->binary);
1821 htab_empty (optimistic_info->phis);
1822 htab_empty (optimistic_info->references);
1823 empty_alloc_pool (optimistic_info->unary_op_pool);
1824 empty_alloc_pool (optimistic_info->binary_op_pool);
1825 empty_alloc_pool (optimistic_info->phis_pool);
1826 empty_alloc_pool (optimistic_info->references_pool);
1827 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1828 changed |= visit_use (var);
1831 if (dump_file && (dump_flags & TDF_STATS))
1832 fprintf (dump_file, "Processing SCC required %d iterations\n",
1835 /* Finally, visit the SCC once using the valid table. */
1836 current_info = valid_info;
1837 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
/* Tarjan-style depth-first search over the SSA use-def graph rooted at
   NAME, discovering SCCs (via dfsnum/low/on_sccstack bookkeeping) and
   popping each completed SCC off sccstack for processing.  Gives up
   when an SCC exceeds PARAM_SCCVN_MAX_SCC_SIZE.  The function header
   line and several recursion/return lines are elided in this listing.  */
1842 /* Depth first search on NAME to discover and process SCC's in the SSA
1844 Execution of this algorithm relies on the fact that the SCC's are
1845 popped off the stack in topological order.
1846 Returns true if successful, false if we stopped processing SCC's due
1847 to resource constraints. */
1857 VN_INFO (name)->dfsnum = next_dfs_num++;
1858 VN_INFO (name)->visited = true;
1859 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
1861 VEC_safe_push (tree, heap, sccstack, name);
1862 VN_INFO (name)->on_sccstack = true;
1863 defstmt = SSA_NAME_DEF_STMT (name);
1865 /* Recursively DFS on our operands, looking for SCC's. */
1866 if (!IS_EMPTY_STMT (defstmt))
1868 FOR_EACH_PHI_OR_STMT_USE (usep, SSA_NAME_DEF_STMT (name), iter,
1871 tree use = USE_FROM_PTR (usep);
1873 /* Since we handle phi nodes, we will sometimes get
1874 invariants in the use expression. */
1875 if (TREE_CODE (use) != SSA_NAME)
1878 if (! (VN_INFO (use)->visited))
/* Tree edge: propagate the child's low value upward.  */
1882 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
1883 VN_INFO (use)->low);
/* Back edge to a node still on the stack lowers our low value.  */
1885 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
1886 && VN_INFO (use)->on_sccstack)
1888 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
1889 VN_INFO (name)->low);
1894 /* See if we found an SCC. */
1895 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
1897 VEC (tree, heap) *scc = NULL;
1900 /* Found an SCC, pop the components off the SCC stack and
1904 x = VEC_pop (tree, sccstack);
1906 VN_INFO (x)->on_sccstack = false;
1907 VEC_safe_push (tree, heap, scc, x);
1908 } while (x != name);
1910 /* Bail out of SCCVN in case a SCC turns out to be incredibly large. */
1911 if (VEC_length (tree, scc)
1912 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
1915 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
1916 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
1917 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
/* Multi-member SCCs must be sorted into RPO order before visiting.  */
1921 if (VEC_length (tree, scc) > 1)
1924 if (dump_file && (dump_flags & TDF_DETAILS))
1925 print_scc (dump_file, scc);
1929 VEC_free (tree, heap, scc);
/* NOTE(review): lone surviving line of the phi hashtable free callback
   (its header and surrounding lines are elided in this listing); it
   releases the cached PHI argument vector.  */
1939 VEC_free (tree, heap, phi->phiargs);
/* Hashtable free callback: release the operand vector of a
   vn_reference_t entry VP.  */
1943 /* Free a reference operation structure VP. */
1946 free_reference (void *vp)
1948 vn_reference_t vr = vp;
1949 VEC_free (vn_reference_op_s, heap, vr->operands);
/* Allocate the four hashtables (phis, unary, binary, references) and
   their backing allocation pools for value number table TABLE.  The
   phi table owns its entries via the free_phi callback; pool size
   arguments are elided in this listing.  */
1952 /* Allocate a value number table. */
1955 allocate_vn_table (vn_tables_t table)
1957 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
1958 table->unary = htab_create (23, vn_unary_op_hash, vn_unary_op_eq, NULL);
1959 table->binary = htab_create (23, vn_binary_op_hash, vn_binary_op_eq, NULL);
1960 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
1963 table->unary_op_pool = create_alloc_pool ("VN unary operations",
1964 sizeof (struct vn_unary_op_s),
1966 table->binary_op_pool = create_alloc_pool ("VN binary operations",
1967 sizeof (struct vn_binary_op_s),
1969 table->phis_pool = create_alloc_pool ("VN phis",
1970 sizeof (struct vn_phi_s),
1972 table->references_pool = create_alloc_pool ("VN references",
1973 sizeof (struct vn_reference_s),
/* Release everything allocate_vn_table created for TABLE: the four
   hashtables and the four allocation pools.  */
1977 /* Free a value number table. */
1980 free_vn_table (vn_tables_t table)
1982 htab_delete (table->phis);
1983 htab_delete (table->unary);
1984 htab_delete (table->binary);
1985 htab_delete (table->references);
1986 free_alloc_pool (table->unary_op_pool);
1987 free_alloc_pool (table->binary_op_pool);
1988 free_alloc_pool (table->phis_pool);
1989 free_alloc_pool (table->references_pool);
/* NOTE(review): the function header is elided in this listing; this
   appears to be the SCCVN initialization routine.  It computes
   dominance info, allocates the per-SSA-name aux table, builds the
   basic-block -> RPO-number mapping, creates the VN_TOP sentinel,
   initializes every SSA name's value number to VN_TOP, assigns
   sequential uids to statements, and allocates the valid and
   optimistic value number tables.  */
1997 int *rpo_numbers_temp;
2001 calculate_dominance_info (CDI_DOMINATORS);
2005 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
2006 /* VEC_alloc doesn't actually grow it to the right size, it just
2007 preallocates the space to do so. */
2008 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
2009 shared_lookup_phiargs = NULL;
2010 shared_lookup_vops = NULL;
2011 shared_lookup_references = NULL;
2012 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
2013 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
2014 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
2016 /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
2017 the i'th block in RPO order is bb. We want to map bb's to RPO
2018 numbers, so we need to rearrange this array. */
2019 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
2020 rpo_numbers[rpo_numbers_temp[j]] = j;
2022 free (rpo_numbers_temp);
/* VN_TOP is a fresh raw temporary used as the "unknown" lattice top.  */
2024 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
2026 /* Create the VN_INFO structures, and initialize value numbers to
2028 for (i = 0; i < num_ssa_names; i++)
2030 tree name = ssa_name (i);
2033 VN_INFO_GET (name)->valnum = VN_TOP;
2034 VN_INFO (name)->expr = name;
2040 block_stmt_iterator bsi;
2041 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2043 tree stmt = bsi_stmt (bsi);
2044 stmt_ann (stmt)->uid = id++;
2048 /* Create the valid and optimistic value numbering tables. */
2049 valid_info = XCNEW (struct vn_tables_s);
2050 allocate_vn_table (valid_info);
2051 optimistic_info = XCNEW (struct vn_tables_s);
2052 allocate_vn_table (optimistic_info);
/* Allocate a fresh value number table for PRE's use and make it the
   current table.  */
2057 switch_to_PRE_table (void)
2059 pre_info = XCNEW (struct vn_tables_s);
2060 allocate_vn_table (pre_info);
2061 current_info = pre_info;
/* NOTE(review): the function header is elided; this appears to be the
   SCCVN teardown routine.  It frees the shared lookup vectors, the RPO
   map, every VN_INFO structure (clearing stale VALUE_HANDLE entries in
   SSA_NAME_VALUE), the SCC stack, the aux table, and the valid,
   optimistic, and (presumably when present) PRE value number tables.  */
2069 VEC_free (tree, heap, shared_lookup_phiargs);
2070 VEC_free (tree, gc, shared_lookup_vops);
2071 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
2072 XDELETEVEC (rpo_numbers);
2073 for (i = 0; i < num_ssa_names; i++)
2075 tree name = ssa_name (i);
2078 XDELETE (VN_INFO (name));
2079 if (SSA_NAME_VALUE (name) &&
2080 TREE_CODE (SSA_NAME_VALUE (name)) == VALUE_HANDLE)
2081 SSA_NAME_VALUE (name) = NULL;
2085 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
2086 VEC_free (tree, heap, sccstack);
2087 free_vn_table (valid_info);
2088 XDELETE (valid_info);
2089 free_vn_table (optimistic_info);
2090 XDELETE (optimistic_info);
2093 free_vn_table (pre_info);
/* NOTE(review): head of the SCCVN driver; the function continues past
   the end of this chunk and its header line is elided.  Visible work:
   seed default definitions of parameters with themselves as value
   numbers, walk SSA names from high to low starting the SCC DFS on
   unvisited names with uses, then dump the resulting value numbers.  */
2098 /* Do SCCVN. Returns true if it finished, false if we bailed out
2099 due to resource constraints. */
2108 current_info = valid_info;
2110 for (param = DECL_ARGUMENTS (current_function_decl);
2112 param = TREE_CHAIN (param))
2114 if (gimple_default_def (cfun, param) != NULL)
2116 tree def = gimple_default_def (cfun, param);
/* A parameter's incoming value is only equal to itself.  */
2117 SSA_VAL (def) = def;
2121 for (i = num_ssa_names - 1; i > 0; i--)
2123 tree name = ssa_name (i);
2125 && VN_INFO (name)->visited == false
2126 && !has_zero_uses (name))
2134 if (dump_file && (dump_flags & TDF_DETAILS))
2136 fprintf (dump_file, "Value numbers:\n");
2137 for (i = 0; i < num_ssa_names; i++)
2139 tree name = ssa_name (i);
2140 if (name && VN_INFO (name)->visited
2141 && (SSA_VAL (name) != name
2142 || is_gimple_min_invariant (VN_INFO (name)->expr)))
2144 print_generic_expr (dump_file, name, 0);
2145 fprintf (dump_file, " = ");
2146 if (is_gimple_min_invariant (VN_INFO (name)->expr))
2147 print_generic_expr (dump_file, VN_INFO (name)->expr, 0);
2149 print_generic_expr (dump_file, SSA_VAL (name), 0);
2150 fprintf (dump_file, "\n");