1 /* SCC value numbering for trees
2 Copyright (C) 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dan@dberlin.org>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "basic-block.h"
29 #include "diagnostic.h"
30 #include "tree-inline.h"
31 #include "tree-flow.h"
32 #include "tree-gimple.h"
33 #include "tree-dump.h"
37 #include "tree-iterator.h"
39 #include "alloc-pool.h"
40 #include "tree-pass.h"
43 #include "langhooks.h"
46 #include "tree-ssa-propagate.h"
47 #include "tree-ssa-sccvn.h"
49 /* This algorithm is based on the SCC algorithm presented by Keith
50 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
51 (http://citeseer.ist.psu.edu/41805.html). In
52 straight line code, it is equivalent to a regular hash based value
53 numbering that is performed in reverse postorder.
55 For code with cycles, there are two alternatives, both of which
56 require keeping the hashtables separate from the actual list of
57 value numbers for SSA names.
59 1. Iterate value numbering in an RPO walk of the blocks, removing
60 all the entries from the hashtable after each iteration (but
61 keeping the SSA name->value number mapping between iterations).
62 Iterate until it does not change.
64 2. Perform value numbering as part of an SCC walk on the SSA graph,
65 iterating only the cycles in the SSA graph until they do not change
(using a separate, optimistic hashtable for value numbering the SCC
until it stabilizes).
69 The second is not just faster in practice (because most SSA graph
cycles do not involve all the variables in the graph), it also has
some nice properties.
73 One of these nice properties is that when we pop an SCC off the
74 stack, we are guaranteed to have processed all the operands coming from
75 *outside of that SCC*, so we do not need to do anything special to
76 ensure they have value numbers.
78 Another nice property is that the SCC walk is done as part of a DFS
79 of the SSA graph, which makes it easy to perform combining and
80 simplifying operations at the same time.
82 The code below is deliberately written in a way that makes it easy
83 to separate the SCC walk from the other work it does.
85 In order to propagate constants through the code, we track which
86 expressions contain constants, and use those while folding. In
87 theory, we could also track expressions whose value numbers are
replaced, in case we end up folding based on expression
identities.
91 In order to value number memory, we assign value numbers to vuses.
92 This enables us to note that, for example, stores to the same
93 address of the same value from the same starting memory states are
97 1. We can iterate only the changing portions of the SCC's, but
98 I have not seen an SCC big enough for this to be a win.
99 2. If you differentiate between phi nodes for loops and phi nodes
100 for if-then-else, you can properly consider phi nodes in different
101 blocks for equivalence.
3. We could value number vuses in more cases, particularly, whole
structure copies.
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
  /* Obstack backing the variable-sized vn_nary_op_s entries
     (see vn_nary_op_insert).  */
  struct obstack nary_obstack;
  /* Allocation pools for the fixed-size phi and reference
     hashtable entries.  */
  alloc_pool phis_pool;
  alloc_pool references_pool;

/* Nary operations in the hashtable consist of length operands, an
   opcode, and a type.  Result is the value number of the operation,
   and hashcode is stored to avoid having to calculate it
   repeatedly.  */

typedef struct vn_nary_op_s
  ENUM_BITFIELD(tree_code) opcode : 16;
  /* Number of entries in the trailing operand array; the allocation in
     vn_nary_op_insert implies a maximum of 4.  */
  unsigned length : 16;
typedef const struct vn_nary_op_s *const_vn_nary_op_t;

/* Phi nodes in the hashtable consist of their non-VN_TOP phi
   arguments, and the basic block the phi is in.  Result is the value
   number of the operation, and hashcode is stored to avoid having to
   calculate it repeatedly.  Phi nodes not in the same block are never
   considered equivalent.  */

typedef struct vn_phi_s
  /* Phi arguments, canonicalized to value numbers where known.  */
  VEC (tree, heap) *phiargs;
typedef const struct vn_phi_s *const_vn_phi_t;

/* Reference operands only exist in reference operations structures.
   They consist of an opcode, type, and some number of operands.  For
   a given opcode, some, all, or none of the operands may be used.
   The operands are there to store the information that makes up the
   portion of the addressing calculation that opcode performs.  */

typedef struct vn_reference_op_struct
  enum tree_code opcode;
typedef vn_reference_op_s *vn_reference_op_t;
typedef const vn_reference_op_s *const_vn_reference_op_t;

DEF_VEC_O(vn_reference_op_s);
DEF_VEC_ALLOC_O(vn_reference_op_s, heap);

/* A reference operation in the hashtable is represented as a
   collection of vuses, representing the memory state at the time of
   the operation, and a collection of operands that make up the
   addressing calculation.  If two vn_reference_t's have the same set
   of operands, they access the same memory location.  We also store
   the resulting value number, and the hashcode.  The vuses are
   always stored in order sorted by ssa name version.  */

typedef struct vn_reference_s
  /* Memory state (virtual uses) at the point of the operation;
     GC-allocated (contrast the heap-allocated operand vector).  */
  VEC (tree, gc) *vuses;
  /* Operands making up the addressing calculation.  */
  VEC (vn_reference_op_s, heap) *operands;
typedef const struct vn_reference_s *const_vn_reference_t;
/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* PRE hashtables storing information about mapping from expressions to
   value handles.  */

static vn_tables_t pre_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;

/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

/* Shorthand for the current value number of SSA name X.  */
#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;

DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return the value numbering information for a given SSA name.  */

  /* NOTE(review): the function signature was lost in this excerpt; the
     body indexes vn_ssa_aux_table by the SSA name's version number.  */
  return VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
		    SSA_NAME_VERSION (name));
/* Set the value numbering info for a given SSA name to a given
   value.  */

VN_INFO_SET (tree name, vn_ssa_aux_t value)
  /* Overwrite the table slot for NAME's version with VALUE.  */
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
	       SSA_NAME_VERSION (name), value);
/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

VN_INFO_GET (tree name)
  vn_ssa_aux_t newinfo;

  /* Allocate the aux record on the obstack and zero-initialize it.  */
  newinfo = obstack_alloc (&vn_ssa_aux_obstack, sizeof (struct vn_ssa_aux));
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  /* Grow the table if this SSA name version is past the end.  */
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
		   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
	       SSA_NAME_VERSION (name), newinfo);
/* Free a phi operation structure VP.  */

  /* Release the heap-allocated phi argument vector.  */
  VEC_free (tree, heap, phi->phiargs);
281 /* Free a reference operation structure VP. */
284 free_reference (void *vp)
286 vn_reference_t vr = vp;
287 VEC_free (vn_reference_op_s, heap, vr->operands);
290 /* Compare two reference operands P1 and P2 for equality. return true if
291 they are equal, and false otherwise. */
294 vn_reference_op_eq (const void *p1, const void *p2)
296 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
297 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
298 return vro1->opcode == vro2->opcode
299 && vro1->type == vro2->type
300 && expressions_equal_p (vro1->op0, vro2->op0)
301 && expressions_equal_p (vro1->op1, vro2->op1);
304 /* Compute the hash for a reference operand VRO1 */
307 vn_reference_op_compute_hash (const vn_reference_op_t vro1)
309 return iterative_hash_expr (vro1->op0, vro1->opcode)
310 + iterative_hash_expr (vro1->op1, vro1->opcode);
313 /* Return the hashcode for a given reference operation P1. */
316 vn_reference_hash (const void *p1)
318 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
319 return vr1->hashcode;
322 /* Compute a hash for the reference operation VR1 and return it. */
324 static inline hashval_t
325 vn_reference_compute_hash (const vn_reference_t vr1)
327 hashval_t result = 0;
330 vn_reference_op_t vro;
332 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
333 result += iterative_hash_expr (v, 0);
334 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
335 result += vn_reference_op_compute_hash (vro);
/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

  vn_reference_op_t vro;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Trivially equal if both share the same vectors.  */
  if (vr1->vuses == vr2->vuses
      && vr1->operands == vr2->operands)

  /* Impossible for them to be equivalent if they have different
     numbers of vuses.  */
  if (VEC_length (tree, vr1->vuses) != VEC_length (tree, vr2->vuses))

  /* We require that address operands be canonicalized in a way that
     two memory references will have the same operands if they are
     equivalent.  */
  if (VEC_length (vn_reference_op_s, vr1->operands)
      != VEC_length (vn_reference_op_s, vr2->operands))

  /* The memory state is more often different than the address of the
     store/load, so check it first.  */
  for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
      if (VEC_index (tree, vr2->vuses, i) != v)

  /* Then compare the addressing operands element-wise.  */
  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
      if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
/* Place the vuses from STMT into *RESULT.  */

vuses_to_vec (tree stmt, VEC (tree, gc) **result)
  /* Reserve exactly enough room so the quick_push below cannot
     reallocate.  */
  VEC_reserve_exact (tree, gc, *result,
		     num_ssa_operands (stmt, SSA_OP_VIRTUAL_USES));

  FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VIRTUAL_USES)
    VEC_quick_push (tree, *result, vuse);
/* Copy the VUSE names in STMT into a vector, and return
   the vector.  */

copy_vuses_from_stmt (tree stmt)
  /* Freshly-allocated (not shared) vector; caller owns it.  */
  VEC (tree, gc) *vuses = NULL;

  vuses_to_vec (stmt, &vuses);
/* Place the vdefs from STMT into *RESULT.  */

vdefs_to_vec (tree stmt, VEC (tree, gc) **result)
  /* Allocate exactly the number of virtual defs in STMT.  */
  *result = VEC_alloc (tree, gc, num_ssa_operands (stmt, SSA_OP_VIRTUAL_DEFS));

  FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, iter, SSA_OP_VIRTUAL_DEFS)
    VEC_quick_push (tree, *result, vdef);
/* Copy the names of vdef results in STMT into a vector, and return
   the vector.  */

static VEC (tree, gc) *
copy_vdefs_from_stmt (tree stmt)
  /* Freshly-allocated (not shared) vector; caller owns it.  */
  VEC (tree, gc) *vdefs = NULL;

  vdefs_to_vec (stmt, &vdefs);
/* Place for shared_v{uses/defs}_from_stmt to shove vuses/vdefs.  */
static VEC (tree, gc) *shared_lookup_vops;

/* Copy the virtual uses from STMT into SHARED_LOOKUP_VOPS.
   This function will overwrite the current SHARED_LOOKUP_VOPS
   variable.  */

shared_vuses_from_stmt (tree stmt)
  /* Reuse the shared vector to avoid allocating on every lookup.  */
  VEC_truncate (tree, shared_lookup_vops, 0);
  vuses_to_vec (stmt, &shared_lookup_vops);

  return shared_lookup_vops;
/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
  /* Calls are different from all other reference operations.  */
  if (TREE_CODE (ref) == CALL_EXPR)
      vn_reference_op_s temp;
      call_expr_arg_iterator iter;

      /* Copy the call_expr opcode, type, function being called, and
	 arguments.  */
      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = CALL_EXPR;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      /* Record the callee: its decl when known; NOTE(review) the guard
	 that falls back to the CALL_EXPR_FN expression was elided in
	 this excerpt.  */
      callfn = get_callee_fndecl (ref);
	callfn = CALL_EXPR_FN (ref);
      temp.type = TREE_TYPE (callfn);
      temp.opcode = TREE_CODE (callfn);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      /* And one entry per call argument.  */
      FOR_EACH_CALL_EXPR_ARG (callarg, iter, ref)
	  memset (&temp, 0, sizeof (temp));
	  temp.type = TREE_TYPE (callarg);
	  temp.opcode = TREE_CODE (callarg);
	  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* For non-calls, store the information that makes up the address.  */
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);

	case ALIGN_INDIRECT_REF:
	case MISALIGNED_INDIRECT_REF:
	  /* The only operand is the address, which gets its own
	     vn_reference_op_s structure.  */
	  /* Record bits and position.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  /* Record field as operand.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	case ARRAY_RANGE_REF:
	  /* Record index as operand.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 3);
	/* These are only interesting for their operands, their
	   existence, and their type.  They will never be the last
	   ref in the chain of references (IE they require an
	   operand), so we don't have to put anything
	   for op* as it will be handled by the iteration  */
	case VIEW_CONVERT_EXPR:

      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      /* Walk down the reference chain to the base object.  */
      if (REFERENCE_CLASS_P (ref) || TREE_CODE (ref) == ADDR_EXPR)
	ref = TREE_OPERAND (ref, 0);
/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
  /* Freshly-allocated vector; caller owns it.  */
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
/* Shared scratch vector reused by shared_reference_ops_from_ref.  */
static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function, so it is only valid until the next call.  */

static VEC(vn_reference_op_s, heap) *
shared_reference_ops_from_ref (tree ref)
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  return shared_lookup_references;
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
  vn_reference_op_t vro;

  for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
    /* Replace op0 when the op itself is an SSA name or op0 holds one.  */
    if (vro->opcode == SSA_NAME
	|| (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
      vro->op0 = SSA_VAL (vro->op0);
/* Transform any SSA_NAME's in ORIG, a vector of vuse trees, into
   their value numbers.  This is done in-place, and the vector passed
   in is returned.  */

static VEC (tree, gc) *
valueize_vuses (VEC (tree, gc) *orig)
  bool made_replacement = false;

  for (i = 0; VEC_iterate (tree, orig, i, vuse); i++)
    if (vuse != SSA_VAL (vuse))
	made_replacement = true;
	VEC_replace (tree, orig, i, SSA_VAL (vuse));

  /* Only worth re-canonicalizing when something changed and there is
     more than one element (vuses are kept sorted by SSA name
     version).  */
  if (made_replacement && VEC_length (tree, orig) > 1)
/* Lookup OP in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

vn_reference_lookup (tree op, VEC (tree, gc) *vuses)
  struct vn_reference_s vr1;

  /* Build a stack-local key with canonicalized vuses and operands.  */
  vr1.vuses = valueize_vuses (vuses);
  vr1.operands = valueize_refs (shared_reference_ops_from_ref (op));
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  slot = htab_find_slot_with_hash (current_info->references, &vr1, vr1.hashcode,
  /* When iterating optimistically, fall back to facts already proven
     in the valid table.  */
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, &vr1, vr1.hashcode,

  return ((vn_reference_t)*slot)->result;
/* Insert OP into the current hash table with a value number of
   RESULT.  */

vn_reference_insert (tree op, tree result, VEC (tree, gc) *vuses)
  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);

  vr1->vuses = valueize_vuses (vuses);
  vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
  vr1->hashcode = vn_reference_compute_hash (vr1);
  /* Store the value number of RESULT, not RESULT itself.  */
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
    free_reference (*slot);
/* Compute and return the hash value for nary operation VNO1.  */

static inline hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
  /* Canonicalize SSA name operands to their value numbers first so
     equal-valued expressions hash identically.  */
  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  /* Canonicalize operand order for commutative operations.  */
  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
      tree temp = vno1->op[0];
      vno1->op[0] = vno1->op[1];

  for (i = 0; i < vno1->length; ++i)
    hash += iterative_hash_expr (vno1->op[i], vno1->opcode);
734 /* Return the computed hashcode for nary operation P1. */
737 vn_nary_op_hash (const void *p1)
739 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
740 return vno1->hashcode;
743 /* Compare nary operations P1 and P2 and return true if they are
747 vn_nary_op_eq (const void *p1, const void *p2)
749 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
750 const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
753 if (vno1->opcode != vno2->opcode
754 || vno1->type != vno2->type)
757 for (i = 0; i < vno1->length; ++i)
758 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
/* Lookup OP in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

vn_nary_op_lookup (tree op)
  struct vn_nary_op_s vno1;

  /* Build a stack-local key from OP's code, type and operands.  */
  vno1.opcode = TREE_CODE (op);
  vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno1.type = TREE_TYPE (op);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = TREE_OPERAND (op, i);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
  /* When iterating optimistically, fall back to the valid table.  */
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,

  return ((vn_nary_op_t)*slot)->result;
791 /* Insert OP into the current hash table with a value number of
795 vn_nary_op_insert (tree op, tree result)
797 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
802 vno1 = obstack_alloc (¤t_info->nary_obstack,
803 (sizeof (struct vn_nary_op_s)
804 - sizeof (tree) * (4 - length)));
805 vno1->opcode = TREE_CODE (op);
806 vno1->length = length;
807 vno1->type = TREE_TYPE (op);
808 for (i = 0; i < vno1->length; ++i)
809 vno1->op[i] = TREE_OPERAND (op, i);
810 vno1->result = result;
811 vno1->hashcode = vn_nary_op_compute_hash (vno1);
812 slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
  hashval_t result = 0;

  /* Seed with the block index: phis in different blocks are never
     considered equivalent.  */
  result = vp1->block->index;

  for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
      /* VN_TOP arguments are skipped — TOP is equivalent to
	 everything, so it must not affect the hash.  */
      if (phi1op == VN_TOP)
      result += iterative_hash_expr (phi1op, result);
840 /* Return the computed hashcode for phi operation P1. */
843 vn_phi_hash (const void *p1)
845 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
846 return vp1->hashcode;
/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

vn_phi_eq (const void *p1, const void *p2)
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  const_vn_phi_t const vp2 = (const_vn_phi_t) p2;

  /* Phis in different blocks are never equivalent.  */
  if (vp1->block == vp2->block)
      /* Any phi in the same block will have it's arguments in the
	 same edge order, because of how we store phi nodes.  */
      for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
	  tree phi2op = VEC_index (tree, vp2->phiargs, i);
	  /* VN_TOP is compatible with anything, so skip the pair.  */
	  if (phi1op == VN_TOP || phi2op == VN_TOP)
	  if (!expressions_equal_p (phi1op, phi2op))
/* Shared scratch vector for phi lookups; overwritten on each call.  */
static VEC(tree, heap) *shared_lookup_phiargs;

/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

vn_phi_lookup (tree phi)
  VEC_truncate (tree, shared_lookup_phiargs, 0);

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
  vp1.phiargs = shared_lookup_phiargs;
  vp1.block = bb_for_stmt (phi);
  vp1.hashcode = vn_phi_compute_hash (&vp1);
  slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
  /* When iterating optimistically, fall back to the valid table.  */
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,

  return ((vn_phi_t)*slot)->result;
/* Insert PHI into the current hash table with a value number of
   RESULT.  */

vn_phi_insert (tree phi, tree result)
  vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
  VEC (tree, heap) *args = NULL;

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, args, def);

  /* NOTE(review): the assignment of ARGS into vp1->phiargs appears to
     have been elided in this excerpt.  */
  vp1->block = bb_for_stmt (phi);
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,

  /* Because we iterate over phi operations more than once, it's
     possible the slot might already exist here, hence no assert.  */
/* Print set of components in strongly connected component SCC to OUT.  */

print_scc (FILE *out, VEC (tree, heap) *scc)
  fprintf (out, "SCC consists of: ");
  for (i = 0; VEC_iterate (tree, scc, i, var); i++)
      print_generic_expr (out, var, 0);
/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

set_ssa_val_to (tree from, tree to)
      && TREE_CODE (to) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))

  /* The only thing we allow as value numbers are VN_TOP, ssa_names
     and invariants.  So assert that here.  */
  gcc_assert (to != NULL_TREE
	      || TREE_CODE (to) == SSA_NAME
	      || is_gimple_min_invariant (to)));

  if (dump_file && (dump_flags & TDF_DETAILS))
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from, 0);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to, 0);
      fprintf (dump_file, "\n");

  currval = SSA_VAL (from);

  /* Only a genuinely different value counts as a change.  */
  if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
/* Set all definitions in STMT to value number to themselves.
   Return true if a value number changed.  */

defs_to_varying (tree stmt)
  bool changed = false;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
      tree def = DEF_FROM_PTR (defp);

      /* Mark processed and value number the def to itself (varying).  */
      VN_INFO (def)->use_processed = true;
      changed |= set_ssa_val_to (def, def);
1020 /* Visit a copy between LHS and RHS, return true if the value number
1024 visit_copy (tree lhs, tree rhs)
1027 /* Follow chains of copies to their destination. */
1028 while (SSA_VAL (rhs) != rhs && TREE_CODE (SSA_VAL (rhs)) == SSA_NAME)
1029 rhs = SSA_VAL (rhs);
1031 /* The copy may have a more interesting constant filled expression
1032 (we don't, since we know our RHS is just an SSA name). */
1033 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
1034 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
1036 return set_ssa_val_to (lhs, rhs);
1039 /* Visit a unary operator RHS, value number it, and return true if the
1040 value number of LHS has changed as a result. */
1043 visit_unary_op (tree lhs, tree op)
1045 bool changed = false;
1046 tree result = vn_nary_op_lookup (op);
1050 changed = set_ssa_val_to (lhs, result);
1054 changed = set_ssa_val_to (lhs, lhs);
1055 vn_nary_op_insert (op, lhs);
1061 /* Visit a binary operator RHS, value number it, and return true if the
1062 value number of LHS has changed as a result. */
1065 visit_binary_op (tree lhs, tree op)
1067 bool changed = false;
1068 tree result = vn_nary_op_lookup (op);
1072 changed = set_ssa_val_to (lhs, result);
1076 changed = set_ssa_val_to (lhs, lhs);
1077 vn_nary_op_insert (op, lhs);
1083 /* Visit a load from a reference operator RHS, part of STMT, value number it,
1084 and return true if the value number of the LHS has changed as a result. */
1087 visit_reference_op_load (tree lhs, tree op, tree stmt)
1089 bool changed = false;
1090 tree result = vn_reference_lookup (op, shared_vuses_from_stmt (stmt));
1094 changed = set_ssa_val_to (lhs, result);
1098 changed = set_ssa_val_to (lhs, lhs);
1099 vn_reference_insert (op, lhs, copy_vuses_from_stmt (stmt));
/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

visit_reference_op_store (tree lhs, tree op, tree stmt)
  bool changed = false;
  bool resultsame = false;

  /* First we want to lookup using the *vuses* from the store and see
     if there the last store to this location with the same address
     had the same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store is the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     the store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */

  result = vn_reference_lookup (lhs, shared_vuses_from_stmt (stmt));

      if (TREE_CODE (result) == SSA_NAME)
	result = SSA_VAL (result);
      if (TREE_CODE (op) == SSA_NAME)
      resultsame = expressions_equal_p (result, op);

  if (!result || !resultsame)
      VEC(tree, gc) *vdefs = copy_vdefs_from_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	  fprintf (dump_file, "No store match\n");
	  fprintf (dump_file, "Value numbering store ");
	  print_generic_expr (dump_file, lhs, 0);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, op, 0);
	  fprintf (dump_file, "\n");

      /* Have to set value numbers before insert, since insert is
	 going to valueize the references in-place.  */
      for (i = 0; VEC_iterate (tree, vdefs, i, vdef); i++)
	  VN_INFO (vdef)->use_processed = true;
	  changed |= set_ssa_val_to (vdef, vdef);

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
	  || is_gimple_reg (op))
	vn_reference_insert (lhs, op, vdefs);

      /* We had a match, so value number the vdefs to have the value
	 number of the vuses they came from.  */
      ssa_op_iter op_iter;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Store matched earlier value,"
		 "value numbering store vdefs to matching vuses.\n");

      FOR_EACH_SSA_VDEF_OPERAND (var, vv, stmt, op_iter)
	  tree def = DEF_FROM_PTR (var);

	  /* Uh, if the vuse is a multiuse, we can't really do much
	     here, sadly, since we don't know which value number of
	     which vuse to use.  */
	  if (VUSE_VECT_NUM_ELEM (*vv) != 1)

	    use = VUSE_ELEMENT_VAR (*vv, 0);

	  VN_INFO (def)->use_processed = true;
	  changed |= set_ssa_val_to (def, SSA_VAL (use));
/* Visit and value number PHI, return true if the value number
   changed.  */

visit_phi (tree phi)
  bool changed = false;
  tree sameval = VN_TOP;
  bool allsame = true;

  /* TODO: We could check for this in init_sccvn, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
      tree def = PHI_ARG_DEF (phi, i);

      if (TREE_CODE (def) == SSA_NAME)
	def = SSA_VAL (def);
      if (sameval == VN_TOP)
	  if (!expressions_equal_p (def, sameval))

  /* If all value numbered to the same value, the phi node has that
     value.  */
      if (is_gimple_min_invariant (sameval))
	  /* Propagate the constant through the phi.  */
	  VN_INFO (PHI_RESULT (phi))->has_constants = true;
	  VN_INFO (PHI_RESULT (phi))->expr = sameval;
	  VN_INFO (PHI_RESULT (phi))->has_constants = false;
	  VN_INFO (PHI_RESULT (phi))->expr = sameval;

      if (TREE_CODE (sameval) == SSA_NAME)
	return visit_copy (PHI_RESULT (phi), sameval);

      return set_ssa_val_to (PHI_RESULT (phi), sameval);

  /* Otherwise, see if it is equivalent to a phi node in this block.  */
  result = vn_phi_lookup (phi);
      if (TREE_CODE (result) == SSA_NAME)
	changed = visit_copy (PHI_RESULT (phi), result);
	changed = set_ssa_val_to (PHI_RESULT (phi), result);

      /* No equivalent phi: value number the result to itself and
	 record the phi for later lookups.  */
      vn_phi_insert (phi, PHI_RESULT (phi));
      VN_INFO (PHI_RESULT (phi))->has_constants = false;
      VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
      changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
/* Return true if EXPR contains constants.  */

expr_has_constants (tree expr)
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
      /* Unary expression: check the single operand.  */
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0));

      /* Binary expression: check either operand.  */
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
	|| is_gimple_min_invariant (TREE_OPERAND (expr, 1));
    /* Constants inside reference ops are rarely interesting, but
       it can take a lot of looking to find them.  */
    case tcc_declaration:
      return is_gimple_min_invariant (expr);
/* Replace SSA_NAMES in expr with their value numbers, and return the
   result.
   This is performed in place.  */

valueize_expr (tree expr)
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
      /* Unary case: valueize the single operand, but never substitute
	 VN_TOP into the expression.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
	  && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
	TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
      /* Binary case: valueize both operands, same VN_TOP guard.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
	  && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
	TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
      if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
	  && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
	TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
/* Simplify the binary expression RHS, and return the result if
   simplified.  */

simplify_binary_expression (tree stmt, tree rhs)
  tree result = NULL_TREE;
  tree op0 = TREE_OPERAND (rhs, 0);
  tree op1 = TREE_OPERAND (rhs, 1);

  /* This will not catch every single case we could combine, but will
     catch those with constants.  The goal here is to simultaneously
     combine constants between expressions, but avoid infinite
     expansion of expressions during simplification.  */
  if (TREE_CODE (op0) == SSA_NAME)
      if (VN_INFO (op0)->has_constants)
	op0 = valueize_expr (VN_INFO (op0)->expr);
      else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
	op0 = SSA_VAL (op0);

  if (TREE_CODE (op1) == SSA_NAME)
      if (VN_INFO (op1)->has_constants)
	op1 = valueize_expr (VN_INFO (op1)->expr);
      else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
	op1 = SSA_VAL (op1);

  /* Avoid folding if nothing changed.  */
  if (op0 == TREE_OPERAND (rhs, 0)
      && op1 == TREE_OPERAND (rhs, 1))

  /* Suppress overflow warnings unless the folded result survives as a
     valid gimple expression.  */
  fold_defer_overflow_warnings ();

  result = fold_binary (TREE_CODE (rhs), TREE_TYPE (rhs), op0, op1);

  fold_undefer_overflow_warnings (result && valid_gimple_expression_p (result),

  /* Make sure result is not a complex expression consisting
     of operators of operators (IE (a + b) + (a + c))
     Otherwise, we will end up with unbounded expressions if
     fold does anything at all.  */
  if (result && valid_gimple_expression_p (result))
1390 /* Simplify the unary expression RHS, and return the result if
1394 simplify_unary_expression (tree rhs)
1396 tree result = NULL_TREE;
1397 tree op0 = TREE_OPERAND (rhs, 0);
1399 if (TREE_CODE (op0) != SSA_NAME)
1402 if (VN_INFO (op0)->has_constants)
1403 op0 = valueize_expr (VN_INFO (op0)->expr);
1404 else if (TREE_CODE (rhs) == NOP_EXPR
1405 || TREE_CODE (rhs) == CONVERT_EXPR
1406 || TREE_CODE (rhs) == REALPART_EXPR
1407 || TREE_CODE (rhs) == IMAGPART_EXPR
1408 || TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
1410 /* We want to do tree-combining on conversion-like expressions.
1411 Make sure we feed only SSA_NAMEs or constants to fold though. */
1412 tree tem = valueize_expr (VN_INFO (op0)->expr);
1413 if (UNARY_CLASS_P (tem)
1414 || BINARY_CLASS_P (tem)
1415 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
1416 || TREE_CODE (tem) == SSA_NAME
1417 || is_gimple_min_invariant (tem))
1421 /* Avoid folding if nothing changed, but remember the expression. */
1422 if (op0 == TREE_OPERAND (rhs, 0))
1425 result = fold_unary (TREE_CODE (rhs), TREE_TYPE (rhs), op0);
1428 STRIP_USELESS_TYPE_CONVERSION (result);
1429 if (valid_gimple_expression_p (result))
1436 /* Try to simplify RHS using equivalences and constant folding. */
1439 try_to_simplify (tree stmt, tree rhs)
1441 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
1442 in this case, there is no point in doing extra work. */
1443 if (TREE_CODE (rhs) == SSA_NAME)
1447 switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
1449 /* For references, see if we find a result for the lookup,
1450 and use it if we do. */
1451 case tcc_declaration:
1452 /* Pull out any truly constant values. */
1453 if (TREE_READONLY (rhs)
1454 && TREE_STATIC (rhs)
1455 && DECL_INITIAL (rhs)
1456 && valid_gimple_expression_p (DECL_INITIAL (rhs)))
1457 return DECL_INITIAL (rhs);
1461 /* Do not do full-blown reference lookup here.
1462 ??? But like for tcc_declaration, we should simplify
1463 from constant initializers. */
1465 /* Fallthrough for some codes that can operate on registers. */
1466 if (!(TREE_CODE (rhs) == REALPART_EXPR
1467 || TREE_CODE (rhs) == IMAGPART_EXPR
1468 || TREE_CODE (rhs) == VIEW_CONVERT_EXPR))
1470 /* We could do a little more with unary ops, if they expand
1471 into binary ops, but it's debatable whether it is worth it. */
1473 return simplify_unary_expression (rhs);
1475 case tcc_comparison:
1477 return simplify_binary_expression (stmt, rhs);
1486 /* Visit and value number USE, return true if the value number
1490 visit_use (tree use)
1492 bool changed = false;
1493 tree stmt = SSA_NAME_DEF_STMT (use);
1496 VN_INFO (use)->use_processed = true;
1498 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
1499 if (dump_file && (dump_flags & TDF_DETAILS))
1501 fprintf (dump_file, "Value numbering ");
1502 print_generic_expr (dump_file, use, 0);
1503 fprintf (dump_file, " stmt = ");
1504 print_generic_stmt (dump_file, stmt, 0);
1507 /* RETURN_EXPR may have an embedded MODIFY_STMT. */
1508 if (TREE_CODE (stmt) == RETURN_EXPR
1509 && TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT)
1510 stmt = TREE_OPERAND (stmt, 0);
1512 ann = stmt_ann (stmt);
1514 /* Handle uninitialized uses. */
1515 if (IS_EMPTY_STMT (stmt))
1517 changed = set_ssa_val_to (use, use);
1521 if (TREE_CODE (stmt) == PHI_NODE)
1523 changed = visit_phi (stmt);
1525 else if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
1526 || (ann && ann->has_volatile_ops)
1527 || tree_could_throw_p (stmt))
1529 changed = defs_to_varying (stmt);
1533 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
1534 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
1537 STRIP_USELESS_TYPE_CONVERSION (rhs);
1539 /* Shortcut for copies. Simplifying copies is pointless,
1540 since we copy the expression and value they represent. */
1541 if (TREE_CODE (rhs) == SSA_NAME && TREE_CODE (lhs) == SSA_NAME)
1543 changed = visit_copy (lhs, rhs);
1546 simplified = try_to_simplify (stmt, rhs);
1547 if (simplified && simplified != rhs)
1549 if (dump_file && (dump_flags & TDF_DETAILS))
1551 fprintf (dump_file, "RHS ");
1552 print_generic_expr (dump_file, rhs, 0);
1553 fprintf (dump_file, " simplified to ");
1554 print_generic_expr (dump_file, simplified, 0);
1555 if (TREE_CODE (lhs) == SSA_NAME)
1556 fprintf (dump_file, " has constants %d\n",
1557 VN_INFO (lhs)->has_constants);
1559 fprintf (dump_file, "\n");
1563 /* Setting value numbers to constants will occasionally
1564 screw up phi congruence because constants are not
1565 uniquely associated with a single ssa name that can be
1567 if (simplified && is_gimple_min_invariant (simplified)
1568 && TREE_CODE (lhs) == SSA_NAME
1569 && simplified != rhs)
1571 VN_INFO (lhs)->expr = simplified;
1572 VN_INFO (lhs)->has_constants = true;
1573 changed = set_ssa_val_to (lhs, simplified);
1576 else if (simplified && TREE_CODE (simplified) == SSA_NAME
1577 && TREE_CODE (lhs) == SSA_NAME)
1579 changed = visit_copy (lhs, simplified);
1582 else if (simplified)
1584 if (TREE_CODE (lhs) == SSA_NAME)
1586 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
1587 /* We have to unshare the expression or else
1588 valuizing may change the IL stream. */
1589 VN_INFO (lhs)->expr = unshare_expr (simplified);
1593 else if (expr_has_constants (rhs) && TREE_CODE (lhs) == SSA_NAME)
1595 VN_INFO (lhs)->has_constants = true;
1596 VN_INFO (lhs)->expr = unshare_expr (rhs);
1598 else if (TREE_CODE (lhs) == SSA_NAME)
1600 /* We reset expr and constantness here because we may
1601 have been value numbering optimistically, and
1602 iterating. They may become non-constant in this case,
1603 even if they were optimistically constant. */
1605 VN_INFO (lhs)->has_constants = false;
1606 VN_INFO (lhs)->expr = lhs;
1609 if (TREE_CODE (lhs) == SSA_NAME
1610 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
1611 changed = defs_to_varying (stmt);
1612 else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
1614 changed = visit_reference_op_store (lhs, rhs, stmt);
1616 else if (TREE_CODE (lhs) == SSA_NAME)
1618 if (is_gimple_min_invariant (rhs))
1620 VN_INFO (lhs)->has_constants = true;
1621 VN_INFO (lhs)->expr = rhs;
1622 changed = set_ssa_val_to (lhs, rhs);
1626 switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
1629 changed = visit_unary_op (lhs, rhs);
1632 changed = visit_binary_op (lhs, rhs);
1634 /* If tcc_vl_expr ever encompasses more than
1635 CALL_EXPR, this will need to be changed. */
1637 if (call_expr_flags (rhs) & (ECF_PURE | ECF_CONST))
1638 changed = visit_reference_op_load (lhs, rhs, stmt);
1640 changed = defs_to_varying (stmt);
1642 case tcc_declaration:
1644 changed = visit_reference_op_load (lhs, rhs, stmt);
1646 case tcc_expression:
1647 if (TREE_CODE (rhs) == ADDR_EXPR)
1649 changed = visit_unary_op (lhs, rhs);
1654 changed = defs_to_varying (stmt);
1660 changed = defs_to_varying (stmt);
1667 /* Compare two operands by reverse postorder index */
1670 compare_ops (const void *pa, const void *pb)
1672 const tree opa = *((const tree *)pa);
1673 const tree opb = *((const tree *)pb);
1674 tree opstmta = SSA_NAME_DEF_STMT (opa);
1675 tree opstmtb = SSA_NAME_DEF_STMT (opb);
1679 if (IS_EMPTY_STMT (opstmta) && IS_EMPTY_STMT (opstmtb))
1681 else if (IS_EMPTY_STMT (opstmta))
1683 else if (IS_EMPTY_STMT (opstmtb))
1686 bba = bb_for_stmt (opstmta);
1687 bbb = bb_for_stmt (opstmtb);
1698 if (TREE_CODE (opstmta) == PHI_NODE && TREE_CODE (opstmtb) == PHI_NODE)
1700 else if (TREE_CODE (opstmta) == PHI_NODE)
1702 else if (TREE_CODE (opstmtb) == PHI_NODE)
1704 return stmt_ann (opstmta)->uid - stmt_ann (opstmtb)->uid;
1706 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
1709 /* Sort an array containing members of a strongly connected component
1710 SCC so that the members are ordered by RPO number.
1711 This means that when the sort is complete, iterating through the
1712 array will give you the members in RPO order. */
1715 sort_scc (VEC (tree, heap) *scc)
1717 qsort (VEC_address (tree, scc),
1718 VEC_length (tree, scc),
1723 /* Process a strongly connected component in the SSA graph. */
1726 process_scc (VEC (tree, heap) *scc)
1728 /* If the SCC has a single member, just visit it. */
1730 if (VEC_length (tree, scc) == 1)
1732 tree use = VEC_index (tree, scc, 0);
1733 if (!VN_INFO (use)->use_processed)
1740 unsigned int iterations = 0;
1741 bool changed = true;
1743 /* Iterate over the SCC with the optimistic table until it stops
1745 current_info = optimistic_info;
1750 htab_empty (optimistic_info->nary);
1751 htab_empty (optimistic_info->phis);
1752 htab_empty (optimistic_info->references);
1753 obstack_free (&optimistic_info->nary_obstack, NULL);
1754 gcc_obstack_init (&optimistic_info->nary_obstack);
1755 empty_alloc_pool (optimistic_info->phis_pool);
1756 empty_alloc_pool (optimistic_info->references_pool);
1757 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1758 changed |= visit_use (var);
1761 if (dump_file && (dump_flags & TDF_STATS))
1762 fprintf (dump_file, "Processing SCC required %d iterations\n",
1765 /* Finally, visit the SCC once using the valid table. */
1766 current_info = valid_info;
1767 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1772 /* Depth first search on NAME to discover and process SCC's in the SSA
1774 Execution of this algorithm relies on the fact that the SCC's are
1775 popped off the stack in topological order.
1776 Returns true if successful, false if we stopped processing SCC's due
1777 to ressource constraints. */
1787 VN_INFO (name)->dfsnum = next_dfs_num++;
1788 VN_INFO (name)->visited = true;
1789 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
1791 VEC_safe_push (tree, heap, sccstack, name);
1792 VN_INFO (name)->on_sccstack = true;
1793 defstmt = SSA_NAME_DEF_STMT (name);
1795 /* Recursively DFS on our operands, looking for SCC's. */
1796 if (!IS_EMPTY_STMT (defstmt))
1798 FOR_EACH_PHI_OR_STMT_USE (usep, SSA_NAME_DEF_STMT (name), iter,
1801 tree use = USE_FROM_PTR (usep);
1803 /* Since we handle phi nodes, we will sometimes get
1804 invariants in the use expression. */
1805 if (TREE_CODE (use) != SSA_NAME)
1808 if (! (VN_INFO (use)->visited))
1812 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
1813 VN_INFO (use)->low);
1815 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
1816 && VN_INFO (use)->on_sccstack)
1818 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
1819 VN_INFO (name)->low);
1824 /* See if we found an SCC. */
1825 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
1827 VEC (tree, heap) *scc = NULL;
1830 /* Found an SCC, pop the components off the SCC stack and
1834 x = VEC_pop (tree, sccstack);
1836 VN_INFO (x)->on_sccstack = false;
1837 VEC_safe_push (tree, heap, scc, x);
1838 } while (x != name);
1840 /* Bail out of SCCVN in case a SCC turns out to be incredibly large. */
1841 if (VEC_length (tree, scc)
1842 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
1845 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
1846 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
1847 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
1851 if (VEC_length (tree, scc) > 1)
1854 if (dump_file && (dump_flags & TDF_DETAILS))
1855 print_scc (dump_file, scc);
1859 VEC_free (tree, heap, scc);
1865 /* Allocate a value number table. */
1868 allocate_vn_table (vn_tables_t table)
1870 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
1871 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
1872 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
1875 gcc_obstack_init (&table->nary_obstack);
1876 table->phis_pool = create_alloc_pool ("VN phis",
1877 sizeof (struct vn_phi_s),
1879 table->references_pool = create_alloc_pool ("VN references",
1880 sizeof (struct vn_reference_s),
1884 /* Free a value number table. */
1887 free_vn_table (vn_tables_t table)
1889 htab_delete (table->phis);
1890 htab_delete (table->nary);
1891 htab_delete (table->references);
1892 obstack_free (&table->nary_obstack, NULL);
1893 free_alloc_pool (table->phis_pool);
1894 free_alloc_pool (table->references_pool);
1902 int *rpo_numbers_temp;
1906 calculate_dominance_info (CDI_DOMINATORS);
1910 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
1911 /* VEC_alloc doesn't actually grow it to the right size, it just
1912 preallocates the space to do so. */
1913 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
1914 gcc_obstack_init (&vn_ssa_aux_obstack);
1916 shared_lookup_phiargs = NULL;
1917 shared_lookup_vops = NULL;
1918 shared_lookup_references = NULL;
1919 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
1920 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
1921 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
1923 /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
1924 the i'th block in RPO order is bb. We want to map bb's to RPO
1925 numbers, so we need to rearrange this array. */
1926 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
1927 rpo_numbers[rpo_numbers_temp[j]] = j;
1929 XDELETE (rpo_numbers_temp);
1931 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
1933 /* Create the VN_INFO structures, and initialize value numbers to
1935 for (i = 0; i < num_ssa_names; i++)
1937 tree name = ssa_name (i);
1940 VN_INFO_GET (name)->valnum = VN_TOP;
1941 VN_INFO (name)->expr = name;
1947 block_stmt_iterator bsi;
1948 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1950 tree stmt = bsi_stmt (bsi);
1951 stmt_ann (stmt)->uid = id++;
1955 /* Create the valid and optimistic value numbering tables. */
1956 valid_info = XCNEW (struct vn_tables_s);
1957 allocate_vn_table (valid_info);
1958 optimistic_info = XCNEW (struct vn_tables_s);
1959 allocate_vn_table (optimistic_info);
1964 switch_to_PRE_table (void)
1966 pre_info = XCNEW (struct vn_tables_s);
1967 allocate_vn_table (pre_info);
1968 current_info = pre_info;
1976 VEC_free (tree, heap, shared_lookup_phiargs);
1977 VEC_free (tree, gc, shared_lookup_vops);
1978 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
1979 XDELETEVEC (rpo_numbers);
1981 for (i = 0; i < num_ssa_names; i++)
1983 tree name = ssa_name (i);
1985 && SSA_NAME_VALUE (name)
1986 && TREE_CODE (SSA_NAME_VALUE (name)) == VALUE_HANDLE)
1987 SSA_NAME_VALUE (name) = NULL;
1989 obstack_free (&vn_ssa_aux_obstack, NULL);
1990 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
1992 VEC_free (tree, heap, sccstack);
1993 free_vn_table (valid_info);
1994 XDELETE (valid_info);
1995 free_vn_table (optimistic_info);
1996 XDELETE (optimistic_info);
1999 free_vn_table (pre_info);
2004 /* Do SCCVN. Returns true if it finished, false if we bailed out
2005 due to ressource constraints. */
2014 current_info = valid_info;
2016 for (param = DECL_ARGUMENTS (current_function_decl);
2018 param = TREE_CHAIN (param))
2020 if (gimple_default_def (cfun, param) != NULL)
2022 tree def = gimple_default_def (cfun, param);
2023 SSA_VAL (def) = def;
2027 for (i = num_ssa_names - 1; i > 0; i--)
2029 tree name = ssa_name (i);
2031 && VN_INFO (name)->visited == false
2032 && !has_zero_uses (name))
2040 if (dump_file && (dump_flags & TDF_DETAILS))
2042 fprintf (dump_file, "Value numbers:\n");
2043 for (i = 0; i < num_ssa_names; i++)
2045 tree name = ssa_name (i);
2046 if (name && VN_INFO (name)->visited
2047 && (SSA_VAL (name) != name
2048 || is_gimple_min_invariant (VN_INFO (name)->expr)))
2050 print_generic_expr (dump_file, name, 0);
2051 fprintf (dump_file, " = ");
2052 if (is_gimple_min_invariant (VN_INFO (name)->expr))
2053 print_generic_expr (dump_file, VN_INFO (name)->expr, 0);
2055 print_generic_expr (dump_file, SSA_VAL (name), 0);
2056 fprintf (dump_file, "\n");