1 /* SCC value numbering for trees
2 Copyright (C) 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dan@dberlin.org>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "basic-block.h"
29 #include "diagnostic.h"
30 #include "tree-inline.h"
31 #include "tree-flow.h"
32 #include "tree-gimple.h"
33 #include "tree-dump.h"
37 #include "tree-iterator.h"
39 #include "alloc-pool.h"
40 #include "tree-pass.h"
43 #include "langhooks.h"
46 #include "tree-ssa-propagate.h"
47 #include "tree-ssa-sccvn.h"
49 /* This algorithm is based on the SCC algorithm presented by Keith
50 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
51 (http://citeseer.ist.psu.edu/41805.html). In
52 straight line code, it is equivalent to a regular hash based value
53 numbering that is performed in reverse postorder.
55 For code with cycles, there are two alternatives, both of which
56 require keeping the hashtables separate from the actual list of
57 value numbers for SSA names.
59 1. Iterate value numbering in an RPO walk of the blocks, removing
60 all the entries from the hashtable after each iteration (but
61 keeping the SSA name->value number mapping between iterations).
62 Iterate until it does not change.
64 2. Perform value numbering as part of an SCC walk on the SSA graph,
65 iterating only the cycles in the SSA graph until they do not change
66 (using a separate, optimistic hashtable for value numbering the SCC
until it stabilizes).
69 The second is not just faster in practice (because most SSA graph
70 cycles do not involve all the variables in the graph), it also has
some nice properties.
73 One of these nice properties is that when we pop an SCC off the
74 stack, we are guaranteed to have processed all the operands coming from
75 *outside of that SCC*, so we do not need to do anything special to
76 ensure they have value numbers.
78 Another nice property is that the SCC walk is done as part of a DFS
79 of the SSA graph, which makes it easy to perform combining and
80 simplifying operations at the same time.
82 The code below is deliberately written in a way that makes it easy
83 to separate the SCC walk from the other work it does.
85 In order to propagate constants through the code, we track which
86 expressions contain constants, and use those while folding. In
87 theory, we could also track expressions whose value numbers are
88 replaced, in case we end up folding based on expression
identities.
91 In order to value number memory, we assign value numbers to vuses.
92 This enables us to note that, for example, stores to the same
93 address of the same value from the same starting memory states are
equivalent.
97 1. We can iterate only the changing portions of the SCC's, but
98 I have not seen an SCC big enough for this to be a win.
99 2. If you differentiate between phi nodes for loops and phi nodes
100 for if-then-else, you can properly consider phi nodes in different
101 blocks for equivalence.
102 3. We could value number vuses in more cases, particularly, whole
structure copies.
106 /* The set of hashtables and alloc_pool's for their items. */
108 typedef struct vn_tables_s
113 struct obstack nary_obstack;
114 alloc_pool phis_pool;
115 alloc_pool references_pool;
118 /* Nary operations in the hashtable consist of length operands, an
119 opcode, and a type. Result is the value number of the operation,
120 and hashcode is stored to avoid having to calculate it
123 typedef struct vn_nary_op_s
125 ENUM_BITFIELD(tree_code) opcode : 16;
126 unsigned length : 16;
132 typedef const struct vn_nary_op_s *const_vn_nary_op_t;
134 /* Phi nodes in the hashtable consist of their non-VN_TOP phi
135 arguments, and the basic block the phi is in. Result is the value
136 number of the operation, and hashcode is stored to avoid having to
137 calculate it repeatedly. Phi nodes not in the same block are never
138 considered equivalent. */
140 typedef struct vn_phi_s
142 VEC (tree, heap) *phiargs;
147 typedef const struct vn_phi_s *const_vn_phi_t;
149 /* Reference operands only exist in reference operations structures.
150 They consist of an opcode, type, and some number of operands. For
151 a given opcode, some, all, or none of the operands may be used.
152 The operands are there to store the information that makes up the
153 portion of the addressing calculation that opcode performs. */
155 typedef struct vn_reference_op_struct
157 enum tree_code opcode;
162 typedef vn_reference_op_s *vn_reference_op_t;
163 typedef const vn_reference_op_s *const_vn_reference_op_t;
165 DEF_VEC_O(vn_reference_op_s);
166 DEF_VEC_ALLOC_O(vn_reference_op_s, heap);
168 /* A reference operation in the hashtable is representation as a
169 collection of vuses, representing the memory state at the time of
170 the operation, and a collection of operands that make up the
171 addressing calculation. If two vn_reference_t's have the same set
172 of operands, they access the same memory location. We also store
173 the resulting value number, and the hashcode. The vuses are
174 always stored in order sorted by ssa name version. */
176 typedef struct vn_reference_s
178 VEC (tree, gc) *vuses;
179 VEC (vn_reference_op_s, heap) *operands;
183 typedef const struct vn_reference_s *const_vn_reference_t;
185 /* Valid hashtables storing information we have proven to be
188 static vn_tables_t valid_info;
190 /* Optimistic hashtables storing information we are making assumptions about
191 during iterations. */
193 static vn_tables_t optimistic_info;
195 /* PRE hashtables storing information about mapping from expressions to
198 static vn_tables_t pre_info;
200 /* Pointer to the set of hashtables that is currently being used.
201 Should always point to either the optimistic_info, or the
204 static vn_tables_t current_info;
207 /* Reverse post order index for each basic block. */
209 static int *rpo_numbers;
211 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
213 /* This represents the top of the VN lattice, which is the universal
218 /* Next DFS number and the stack for strongly connected component
221 static unsigned int next_dfs_num;
222 static VEC (tree, heap) *sccstack;
224 static bool may_insert;
227 DEF_VEC_P(vn_ssa_aux_t);
228 DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);
230 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
231 are allocated on an obstack for locality reasons, and to free them
232 without looping over the VEC. */
234 static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
235 static struct obstack vn_ssa_aux_obstack;
237 /* Return the value numbering information for a given SSA name. */
242 return VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
243 SSA_NAME_VERSION (name));
246 /* Set the value numbering info for a given SSA name to a given
250 VN_INFO_SET (tree name, vn_ssa_aux_t value)
252 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
253 SSA_NAME_VERSION (name), value);
256 /* Initialize the value numbering info for a given SSA name.
257 This should be called just once for every SSA name. */
260 VN_INFO_GET (tree name)
262 vn_ssa_aux_t newinfo;
264 newinfo = obstack_alloc (&vn_ssa_aux_obstack, sizeof (struct vn_ssa_aux));
265 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
266 if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
267 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
268 SSA_NAME_VERSION (name) + 1);
269 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
270 SSA_NAME_VERSION (name), newinfo);
275 /* Free a phi operation structure VP. */
281 VEC_free (tree, heap, phi->phiargs);
284 /* Free a reference operation structure VP. */
287 free_reference (void *vp)
289 vn_reference_t vr = vp;
290 VEC_free (vn_reference_op_s, heap, vr->operands);
293 /* Compare two reference operands P1 and P2 for equality. return true if
294 they are equal, and false otherwise. */
297 vn_reference_op_eq (const void *p1, const void *p2)
299 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
300 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
301 return vro1->opcode == vro2->opcode
302 && vro1->type == vro2->type
303 && expressions_equal_p (vro1->op0, vro2->op0)
304 && expressions_equal_p (vro1->op1, vro2->op1);
307 /* Compute the hash for a reference operand VRO1 */
310 vn_reference_op_compute_hash (const vn_reference_op_t vro1)
312 return iterative_hash_expr (vro1->op0, vro1->opcode)
313 + iterative_hash_expr (vro1->op1, vro1->opcode);
316 /* Return the hashcode for a given reference operation P1. */
319 vn_reference_hash (const void *p1)
321 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
322 return vr1->hashcode;
325 /* Compute a hash for the reference operation VR1 and return it. */
327 static inline hashval_t
328 vn_reference_compute_hash (const vn_reference_t vr1)
330 hashval_t result = 0;
333 vn_reference_op_t vro;
335 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
336 result += iterative_hash_expr (v, 0);
337 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
338 result += vn_reference_op_compute_hash (vro);
343 /* Return true if reference operations P1 and P2 are equivalent. This
344 means they have the same set of operands and vuses. */
347 vn_reference_eq (const void *p1, const void *p2)
351 vn_reference_op_t vro;
353 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
354 const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
356 if (vr1->vuses == vr2->vuses
357 && vr1->operands == vr2->operands)
360 /* Impossible for them to be equivalent if they have different
362 if (VEC_length (tree, vr1->vuses) != VEC_length (tree, vr2->vuses))
365 /* We require that address operands be canonicalized in a way that
366 two memory references will have the same operands if they are
368 if (VEC_length (vn_reference_op_s, vr1->operands)
369 != VEC_length (vn_reference_op_s, vr2->operands))
372 /* The memory state is more often different than the address of the
373 store/load, so check it first. */
374 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
376 if (VEC_index (tree, vr2->vuses, i) != v)
380 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
382 if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
389 /* Place the vuses from STMT into *result */
392 vuses_to_vec (tree stmt, VEC (tree, gc) **result)
400 VEC_reserve_exact (tree, gc, *result,
401 num_ssa_operands (stmt, SSA_OP_VIRTUAL_USES));
403 FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VIRTUAL_USES)
404 VEC_quick_push (tree, *result, vuse);
408 /* Copy the VUSE names in STMT into a vector, and return
412 copy_vuses_from_stmt (tree stmt)
414 VEC (tree, gc) *vuses = NULL;
416 vuses_to_vec (stmt, &vuses);
421 /* Place the vdefs from STMT into *result */
424 vdefs_to_vec (tree stmt, VEC (tree, gc) **result)
432 *result = VEC_alloc (tree, gc, num_ssa_operands (stmt, SSA_OP_VIRTUAL_DEFS));
434 FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, iter, SSA_OP_VIRTUAL_DEFS)
435 VEC_quick_push (tree, *result, vdef);
438 /* Copy the names of vdef results in STMT into a vector, and return
441 static VEC (tree, gc) *
442 copy_vdefs_from_stmt (tree stmt)
444 VEC (tree, gc) *vdefs = NULL;
446 vdefs_to_vec (stmt, &vdefs);
451 /* Place for shared_v{uses/defs}_from_stmt to shove vuses/vdefs. */
452 static VEC (tree, gc) *shared_lookup_vops;
454 /* Copy the virtual uses from STMT into SHARED_LOOKUP_VOPS.
455 This function will overwrite the current SHARED_LOOKUP_VOPS
459 shared_vuses_from_stmt (tree stmt)
461 VEC_truncate (tree, shared_lookup_vops, 0);
462 vuses_to_vec (stmt, &shared_lookup_vops);
464 return shared_lookup_vops;
467 /* Copy the operations present in load/store/call REF into RESULT, a vector of
468 vn_reference_op_s's. */
471 copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
473 /* Calls are different from all other reference operations. */
474 if (TREE_CODE (ref) == CALL_EXPR)
476 vn_reference_op_s temp;
478 call_expr_arg_iterator iter;
481 /* Copy the call_expr opcode, type, function being called, and
483 memset (&temp, 0, sizeof (temp));
484 temp.type = TREE_TYPE (ref);
485 temp.opcode = CALL_EXPR;
486 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
488 callfn = get_callee_fndecl (ref);
490 callfn = CALL_EXPR_FN (ref);
491 temp.type = TREE_TYPE (callfn);
492 temp.opcode = TREE_CODE (callfn);
494 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
496 FOR_EACH_CALL_EXPR_ARG (callarg, iter, ref)
498 memset (&temp, 0, sizeof (temp));
499 temp.type = TREE_TYPE (callarg);
500 temp.opcode = TREE_CODE (callarg);
502 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
507 /* For non-calls, store the information that makes up the address. */
511 vn_reference_op_s temp;
513 memset (&temp, 0, sizeof (temp));
514 /* We do not care for spurious type qualifications. */
515 temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
516 temp.opcode = TREE_CODE (ref);
520 case ALIGN_INDIRECT_REF:
521 case MISALIGNED_INDIRECT_REF:
523 /* The only operand is the address, which gets its own
524 vn_reference_op_s structure. */
527 /* Record bits and position. */
528 temp.op0 = TREE_OPERAND (ref, 1);
529 temp.op1 = TREE_OPERAND (ref, 2);
532 /* The field decl is enough to unambiguously specify the field,
533 a matching type is not necessary and a mismatching type
534 is always a spurious difference. */
535 temp.type = NULL_TREE;
536 /* If this is a reference to a union member, record the union
537 member size as operand. Do so only if we are doing
538 expression insertion (during FRE), as PRE currently gets
539 confused with this. */
541 && TREE_CODE (DECL_CONTEXT (TREE_OPERAND (ref, 1))) == UNION_TYPE
542 && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (ref, 1)))
543 && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1))))
544 temp.op0 = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)));
546 /* Record field as operand. */
547 temp.op0 = TREE_OPERAND (ref, 1);
549 case ARRAY_RANGE_REF:
551 /* Record index as operand. */
552 temp.op0 = TREE_OPERAND (ref, 1);
553 temp.op1 = TREE_OPERAND (ref, 3);
569 /* These are only interesting for their operands, their
570 existence, and their type. They will never be the last
571 ref in the chain of references (IE they require an
572 operand), so we don't have to put anything
573 for op* as it will be handled by the iteration */
576 case VIEW_CONVERT_EXPR:
583 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
585 if (REFERENCE_CLASS_P (ref) || TREE_CODE (ref) == ADDR_EXPR)
586 ref = TREE_OPERAND (ref, 0);
592 /* Create a vector of vn_reference_op_s structures from REF, a
593 REFERENCE_CLASS_P tree. The vector is not shared. */
595 static VEC(vn_reference_op_s, heap) *
596 create_reference_ops_from_ref (tree ref)
598 VEC (vn_reference_op_s, heap) *result = NULL;
600 copy_reference_ops_from_ref (ref, &result);
604 static VEC(vn_reference_op_s, heap) *shared_lookup_references;
606 /* Create a vector of vn_reference_op_s structures from REF, a
607 REFERENCE_CLASS_P tree. The vector is shared among all callers of
610 static VEC(vn_reference_op_s, heap) *
611 shared_reference_ops_from_ref (tree ref)
615 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
616 copy_reference_ops_from_ref (ref, &shared_lookup_references);
617 return shared_lookup_references;
621 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
622 structures into their value numbers. This is done in-place, and
623 the vector passed in is returned. */
625 static VEC (vn_reference_op_s, heap) *
626 valueize_refs (VEC (vn_reference_op_s, heap) *orig)
628 vn_reference_op_t vro;
631 for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
633 if (vro->opcode == SSA_NAME
634 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
635 vro->op0 = SSA_VAL (vro->op0);
641 /* Transform any SSA_NAME's in ORIG, a vector of vuse trees, into
642 their value numbers. This is done in-place, and the vector passed
645 static VEC (tree, gc) *
646 valueize_vuses (VEC (tree, gc) *orig)
648 bool made_replacement = false;
652 for (i = 0; VEC_iterate (tree, orig, i, vuse); i++)
654 if (vuse != SSA_VAL (vuse))
656 made_replacement = true;
657 VEC_replace (tree, orig, i, SSA_VAL (vuse));
661 if (made_replacement && VEC_length (tree, orig) > 1)
667 /* Return the single reference statement defining all virtual uses
668 in VUSES or NULL_TREE, if there are multiple defining statements.
669 Take into account only definitions that alias REF if following
673 get_def_ref_stmt_vuses (tree ref, VEC (tree, gc) *vuses)
678 gcc_assert (VEC_length (tree, vuses) >= 1);
680 def_stmt = SSA_NAME_DEF_STMT (VEC_index (tree, vuses, 0));
681 if (TREE_CODE (def_stmt) == PHI_NODE)
683 /* We can only handle lookups over PHI nodes for a single
685 if (VEC_length (tree, vuses) == 1)
687 def_stmt = get_single_def_stmt_from_phi (ref, def_stmt);
694 /* Verify each VUSE reaches the same defining stmt. */
695 for (i = 1; VEC_iterate (tree, vuses, i, vuse); ++i)
697 tree tmp = SSA_NAME_DEF_STMT (vuse);
702 /* Now see if the definition aliases ref, and loop until it does. */
705 && TREE_CODE (def_stmt) == GIMPLE_MODIFY_STMT
706 && !get_call_expr_in (def_stmt)
707 && !refs_may_alias_p (ref, GIMPLE_STMT_OPERAND (def_stmt, 0)))
708 def_stmt = get_single_def_stmt_with_phi (ref, def_stmt);
713 /* Lookup a SCCVN reference operation VR in the current hash table.
714 Returns the resulting value number if it exists in the hash table,
715 NULL_TREE otherwise. */
718 vn_reference_lookup_1 (vn_reference_t vr)
724 slot = htab_find_slot_with_hash (current_info->references, vr,
726 if (!slot && current_info == optimistic_info)
727 slot = htab_find_slot_with_hash (valid_info->references, vr,
730 return ((vn_reference_t)*slot)->result;
735 /* Lookup OP in the current hash table, and return the resulting
736 value number if it exists in the hash table. Return NULL_TREE if
737 it does not exist in the hash table. */
740 vn_reference_lookup (tree op, VEC (tree, gc) *vuses, bool maywalk)
742 struct vn_reference_s vr1;
743 tree result, def_stmt;
745 vr1.vuses = valueize_vuses (vuses);
746 vr1.operands = valueize_refs (shared_reference_ops_from_ref (op));
747 vr1.hashcode = vn_reference_compute_hash (&vr1);
748 result = vn_reference_lookup_1 (&vr1);
750 /* If there is a single defining statement for all virtual uses, we can
751 use that, following virtual use-def chains. */
755 && VEC_length (tree, vr1.vuses) >= 1
756 && !get_call_expr_in (op)
757 && (def_stmt = get_def_ref_stmt_vuses (op, vr1.vuses))
758 && TREE_CODE (def_stmt) == GIMPLE_MODIFY_STMT
759 /* If there is a call involved, op must be assumed to
761 && !get_call_expr_in (def_stmt))
763 /* We are now at an aliasing definition for the vuses we want to
764 look up. Re-do the lookup with the vdefs for this stmt. */
765 vdefs_to_vec (def_stmt, &vuses);
766 vr1.vuses = valueize_vuses (vuses);
767 vr1.hashcode = vn_reference_compute_hash (&vr1);
768 result = vn_reference_lookup_1 (&vr1);
774 /* Insert OP into the current hash table with a value number of
778 vn_reference_insert (tree op, tree result, VEC (tree, gc) *vuses)
783 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
785 vr1->vuses = valueize_vuses (vuses);
786 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
787 vr1->hashcode = vn_reference_compute_hash (vr1);
788 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
790 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
793 /* Because we lookup stores using vuses, and value number failures
794 using the vdefs (see visit_reference_op_store for how and why),
795 it's possible that on failure we may try to insert an already
796 inserted store. This is not wrong, there is no ssa name for a
797 store that we could use as a differentiator anyway. Thus, unlike
798 the other lookup functions, you cannot gcc_assert (!*slot)
801 /* But free the old slot in case of a collision. */
803 free_reference (*slot);
808 /* Compute and return the hash value for nary operation VBO1. */
810 static inline hashval_t
811 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
816 for (i = 0; i < vno1->length; ++i)
817 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
818 vno1->op[i] = SSA_VAL (vno1->op[i]);
820 if (vno1->length == 2
821 && commutative_tree_code (vno1->opcode)
822 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
824 tree temp = vno1->op[0];
825 vno1->op[0] = vno1->op[1];
829 for (i = 0; i < vno1->length; ++i)
830 hash += iterative_hash_expr (vno1->op[i], vno1->opcode);
835 /* Return the computed hashcode for nary operation P1. */
838 vn_nary_op_hash (const void *p1)
840 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
841 return vno1->hashcode;
844 /* Compare nary operations P1 and P2 and return true if they are
848 vn_nary_op_eq (const void *p1, const void *p2)
850 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
851 const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
854 if (vno1->opcode != vno2->opcode
855 || vno1->type != vno2->type)
858 for (i = 0; i < vno1->length; ++i)
859 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
865 /* Lookup OP in the current hash table, and return the resulting
866 value number if it exists in the hash table. Return NULL_TREE if
867 it does not exist in the hash table. */
870 vn_nary_op_lookup (tree op)
873 struct vn_nary_op_s vno1;
876 vno1.opcode = TREE_CODE (op);
877 vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
878 vno1.type = TREE_TYPE (op);
879 for (i = 0; i < vno1.length; ++i)
880 vno1.op[i] = TREE_OPERAND (op, i);
881 vno1.hashcode = vn_nary_op_compute_hash (&vno1);
882 slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
884 if (!slot && current_info == optimistic_info)
885 slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
889 return ((vn_nary_op_t)*slot)->result;
892 /* Insert OP into the current hash table with a value number of
896 vn_nary_op_insert (tree op, tree result)
898 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
903 vno1 = obstack_alloc (¤t_info->nary_obstack,
904 (sizeof (struct vn_nary_op_s)
905 - sizeof (tree) * (4 - length)));
906 vno1->opcode = TREE_CODE (op);
907 vno1->length = length;
908 vno1->type = TREE_TYPE (op);
909 for (i = 0; i < vno1->length; ++i)
910 vno1->op[i] = TREE_OPERAND (op, i);
911 vno1->result = result;
912 vno1->hashcode = vn_nary_op_compute_hash (vno1);
913 slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
920 /* Compute a hashcode for PHI operation VP1 and return it. */
922 static inline hashval_t
923 vn_phi_compute_hash (vn_phi_t vp1)
925 hashval_t result = 0;
929 result = vp1->block->index;
931 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
933 if (phi1op == VN_TOP)
935 result += iterative_hash_expr (phi1op, result);
941 /* Return the computed hashcode for phi operation P1. */
944 vn_phi_hash (const void *p1)
946 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
947 return vp1->hashcode;
950 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
953 vn_phi_eq (const void *p1, const void *p2)
955 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
956 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
958 if (vp1->block == vp2->block)
963 /* Any phi in the same block will have it's arguments in the
964 same edge order, because of how we store phi nodes. */
965 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
967 tree phi2op = VEC_index (tree, vp2->phiargs, i);
968 if (phi1op == VN_TOP || phi2op == VN_TOP)
970 if (!expressions_equal_p (phi1op, phi2op))
978 static VEC(tree, heap) *shared_lookup_phiargs;
980 /* Lookup PHI in the current hash table, and return the resulting
981 value number if it exists in the hash table. Return NULL_TREE if
982 it does not exist in the hash table. */
985 vn_phi_lookup (tree phi)
991 VEC_truncate (tree, shared_lookup_phiargs, 0);
993 /* Canonicalize the SSA_NAME's to their value number. */
994 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
996 tree def = PHI_ARG_DEF (phi, i);
997 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
998 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
1000 vp1.phiargs = shared_lookup_phiargs;
1001 vp1.block = bb_for_stmt (phi);
1002 vp1.hashcode = vn_phi_compute_hash (&vp1);
1003 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
1005 if (!slot && current_info == optimistic_info)
1006 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
1010 return ((vn_phi_t)*slot)->result;
1013 /* Insert PHI into the current hash table with a value number of
1017 vn_phi_insert (tree phi, tree result)
1020 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
1022 VEC (tree, heap) *args = NULL;
1024 /* Canonicalize the SSA_NAME's to their value number. */
1025 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
1027 tree def = PHI_ARG_DEF (phi, i);
1028 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
1029 VEC_safe_push (tree, heap, args, def);
1031 vp1->phiargs = args;
1032 vp1->block = bb_for_stmt (phi);
1033 vp1->result = result;
1034 vp1->hashcode = vn_phi_compute_hash (vp1);
1036 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
1039 /* Because we iterate over phi operations more than once, it's
1040 possible the slot might already exist here, hence no assert.*/
1045 /* Print set of components in strongly connected component SCC to OUT. */
1048 print_scc (FILE *out, VEC (tree, heap) *scc)
1053 fprintf (out, "SCC consists of: ");
1054 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1056 print_generic_expr (out, var, 0);
1059 fprintf (out, "\n");
1062 /* Set the value number of FROM to TO, return true if it has changed
1066 set_ssa_val_to (tree from, tree to)
1071 && TREE_CODE (to) == SSA_NAME
1072 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
1075 /* The only thing we allow as value numbers are VN_TOP, ssa_names
1076 and invariants. So assert that here. */
1077 gcc_assert (to != NULL_TREE
1079 || TREE_CODE (to) == SSA_NAME
1080 || is_gimple_min_invariant (to)));
1082 if (dump_file && (dump_flags & TDF_DETAILS))
1084 fprintf (dump_file, "Setting value number of ");
1085 print_generic_expr (dump_file, from, 0);
1086 fprintf (dump_file, " to ");
1087 print_generic_expr (dump_file, to, 0);
1088 fprintf (dump_file, "\n");
1091 currval = SSA_VAL (from);
1093 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
1095 SSA_VAL (from) = to;
1101 /* Set all definitions in STMT to value number to themselves.
1102 Return true if a value number changed. */
1105 defs_to_varying (tree stmt)
1107 bool changed = false;
1111 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
1113 tree def = DEF_FROM_PTR (defp);
1115 VN_INFO (def)->use_processed = true;
1116 changed |= set_ssa_val_to (def, def);
1122 try_to_simplify (tree stmt, tree rhs);
1124 /* Visit a copy between LHS and RHS, return true if the value number
1128 visit_copy (tree lhs, tree rhs)
1131 /* Follow chains of copies to their destination. */
1132 while (SSA_VAL (rhs) != rhs && TREE_CODE (SSA_VAL (rhs)) == SSA_NAME)
1133 rhs = SSA_VAL (rhs);
1135 /* The copy may have a more interesting constant filled expression
1136 (we don't, since we know our RHS is just an SSA name). */
1137 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
1138 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
1140 return set_ssa_val_to (lhs, rhs);
1143 /* Visit a unary operator RHS, value number it, and return true if the
1144 value number of LHS has changed as a result. */
1147 visit_unary_op (tree lhs, tree op)
1149 bool changed = false;
1150 tree result = vn_nary_op_lookup (op);
1154 changed = set_ssa_val_to (lhs, result);
1158 changed = set_ssa_val_to (lhs, lhs);
1159 vn_nary_op_insert (op, lhs);
1165 /* Visit a binary operator RHS, value number it, and return true if the
1166 value number of LHS has changed as a result. */
1169 visit_binary_op (tree lhs, tree op)
1171 bool changed = false;
1172 tree result = vn_nary_op_lookup (op);
1176 changed = set_ssa_val_to (lhs, result);
1180 changed = set_ssa_val_to (lhs, lhs);
1181 vn_nary_op_insert (op, lhs);
1187 /* Visit a load from a reference operator RHS, part of STMT, value number it,
1188 and return true if the value number of the LHS has changed as a result. */
1191 visit_reference_op_load (tree lhs, tree op, tree stmt)
1193 bool changed = false;
1194 tree result = vn_reference_lookup (op, shared_vuses_from_stmt (stmt), true);
1196 /* We handle type-punning through unions by value-numbering based
1197 on offset and size of the access. Be prepared to handle a
1198 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
1200 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
1202 /* We will be setting the value number of lhs to the value number
1203 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
1204 So first simplify and lookup this expression to see if it
1205 is already available. */
1206 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
1208 && !is_gimple_min_invariant (val)
1209 && TREE_CODE (val) != SSA_NAME)
1211 tree tem = try_to_simplify (stmt, val);
1216 if (!is_gimple_min_invariant (val)
1217 && TREE_CODE (val) != SSA_NAME)
1218 result = vn_nary_op_lookup (val);
1219 /* If the expression is not yet available, value-number lhs to
1220 a new SSA_NAME we create. */
1221 if (!result && may_insert)
1223 result = make_ssa_name (SSA_NAME_VAR (lhs), NULL_TREE);
1224 /* Initialize value-number information properly. */
1225 VN_INFO_GET (result)->valnum = result;
1226 VN_INFO (result)->expr = val;
1227 VN_INFO (result)->needs_insertion = true;
1228 /* As all "inserted" statements are singleton SCCs, insert
1229 to the valid table. This is strictly needed to
1230 avoid re-generating new value SSA_NAMEs for the same
1231 expression during SCC iteration over and over (the
1232 optimistic table gets cleared after each iteration).
1233 We do not need to insert into the optimistic table, as
1234 lookups there will fall back to the valid table. */
1235 if (current_info == optimistic_info)
1237 current_info = valid_info;
1238 vn_nary_op_insert (val, result);
1239 current_info = optimistic_info;
1242 vn_nary_op_insert (val, result);
1243 if (dump_file && (dump_flags & TDF_DETAILS))
1245 fprintf (dump_file, "Inserting name ");
1246 print_generic_expr (dump_file, result, 0);
1247 fprintf (dump_file, " for expression ");
1248 print_generic_expr (dump_file, val, 0);
1249 fprintf (dump_file, "\n");
1256 changed = set_ssa_val_to (lhs, result);
1257 if (TREE_CODE (result) == SSA_NAME
1258 && VN_INFO (result)->has_constants)
1260 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
1261 VN_INFO (lhs)->has_constants = true;
1266 changed = set_ssa_val_to (lhs, lhs);
1267 vn_reference_insert (op, lhs, copy_vuses_from_stmt (stmt));
1274 /* Visit a store to a reference operator LHS, part of STMT, value number it,
1275    and return true if the value number of the LHS has changed as a result.  */
/* NOTE(review): this extract omits intermediate lines (braces, else-arms,
   some declarations); comments below describe only what is visible.  */
1278 visit_reference_op_store (tree lhs, tree op, tree stmt)
1280   bool changed = false;
1282   bool resultsame = false;
1284   /* First we want to lookup using the *vuses* from the store and see
1285      if there the last store to this location with the same address
1288      The vuses represent the memory state before the store.  If the
1289      memory state, address, and value of the store is the same as the
1290      last store to this location, then this store will produce the
1291      same memory state as that store.
1293      In this case the vdef versions for this store are value numbered to those
1294      vuse versions, since they represent the same memory state after
1297      Otherwise, the vdefs for the store are used when inserting into
1298      the table, since the store generates a new memory state.  */
/* Look up a previous store to the same location under the pre-store
   memory state (the vuses).  */
1300   result = vn_reference_lookup (lhs, shared_vuses_from_stmt (stmt), false);
/* Canonicalize both sides to their value numbers before comparing.  */
1304       if (TREE_CODE (result) == SSA_NAME)
1305 	result = SSA_VAL (result);
1306       if (TREE_CODE (op) == SSA_NAME)
1308       resultsame = expressions_equal_p (result, op);
/* No earlier identical store found: this store creates a new memory
   state, so number the vdefs to themselves and record the store.  */
1311   if (!result || !resultsame)
1313       VEC(tree, gc) *vdefs = copy_vdefs_from_stmt (stmt);
1317       if (dump_file && (dump_flags & TDF_DETAILS))
1319 	  fprintf (dump_file, "No store match\n");
1320 	  fprintf (dump_file, "Value numbering store ");
1321 	  print_generic_expr (dump_file, lhs, 0);
1322 	  fprintf (dump_file, " to ");
1323 	  print_generic_expr (dump_file, op, 0);
1324 	  fprintf (dump_file, "\n");
1326       /* Have to set value numbers before insert, since insert is
1327 	 going to valueize the references in-place.  */
1328       for (i = 0; VEC_iterate (tree, vdefs, i, vdef); i++)
1330 	  VN_INFO (vdef)->use_processed = true;
1331 	  changed |= set_ssa_val_to (vdef, vdef);
1334       /* Do not insert structure copies into the tables.  */
1335       if (is_gimple_min_invariant (op)
1336 	  || is_gimple_reg (op))
1337         vn_reference_insert (lhs, op, vdefs);
1341       /* We had a match, so value number the vdefs to have the value
1342 	 number of the vuses they came from.  */
1343       ssa_op_iter op_iter;
1347       if (dump_file && (dump_flags & TDF_DETAILS))
1348 	fprintf (dump_file, "Store matched earlier value,"
1349 		 "value numbering store vdefs to matching vuses.\n");
1351       FOR_EACH_SSA_VDEF_OPERAND (var, vv, stmt, op_iter)
1353 	  tree def = DEF_FROM_PTR (var);
1356 	  /* Uh, if the vuse is a multiuse, we can't really do much
1357 	     here, sadly, since we don't know which value number of
1358 	     which vuse to use.  */
1359 	  if (VUSE_VECT_NUM_ELEM (*vv) != 1)
/* Single vuse: the post-store state equals the pre-store state, so the
   vdef inherits the vuse's value number.  */
1362 	  use = VUSE_ELEMENT_VAR (*vv, 0);
1364 	  VN_INFO (def)->use_processed = true;
1365 	  changed |= set_ssa_val_to (def, SSA_VAL (use));
1372 /* Visit and value number PHI, return true if the value number
/* (Comment is truncated in this extract: returns true if the value number
   of the PHI result changed.)
   NOTE(review): intermediate lines (braces, else-arms, declarations of
   i/result) are omitted from this extract.  */
1376 visit_phi (tree phi)
1378   bool changed = false;
1380   tree sameval = VN_TOP;
1381   bool allsame = true;
1384   /* TODO: We could check for this in init_sccvn, and replace this
1385      with a gcc_assert.  */
/* Names live over abnormal edges cannot be substituted; number the PHI
   result to itself (varying).  */
1386   if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
1387     return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
1389   /* See if all non-TOP arguments have the same value.  TOP is
1390      equivalent to everything, so we can ignore it.  */
1391   for (i = 0; i < PHI_NUM_ARGS (phi); i++)
1393       tree def = PHI_ARG_DEF (phi, i);
1395       if (TREE_CODE (def) == SSA_NAME)
1396 	def = SSA_VAL (def);
1399       if (sameval == VN_TOP)
1405 	  if (!expressions_equal_p (def, sameval))
1413   /* If all value numbered to the same value, the phi node has that
/* All arguments agreed: the PHI degenerates to a copy of that value.  */
1417       if (is_gimple_min_invariant (sameval))
1419 	  VN_INFO (PHI_RESULT (phi))->has_constants = true;
1420 	  VN_INFO (PHI_RESULT (phi))->expr = sameval;
1424 	  VN_INFO (PHI_RESULT (phi))->has_constants = false;
1425 	  VN_INFO (PHI_RESULT (phi))->expr = sameval;
1428       if (TREE_CODE (sameval) == SSA_NAME)
1429 	return visit_copy (PHI_RESULT (phi), sameval);
1431       return set_ssa_val_to (PHI_RESULT (phi), sameval);
1434   /* Otherwise, see if it is equivalent to a phi node in this block.  */
1435   result = vn_phi_lookup (phi);
1438       if (TREE_CODE (result) == SSA_NAME)
1439 	changed = visit_copy (PHI_RESULT (phi), result);
1441 	changed = set_ssa_val_to (PHI_RESULT (phi), result);
/* No equivalent PHI known: record this one and number its result to
   itself.  */
1445       vn_phi_insert (phi, PHI_RESULT (phi));
1446       VN_INFO (PHI_RESULT (phi))->has_constants = false;
1447       VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
1448       changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
1454 /* Return true if EXPR contains constants.  */
/* NOTE(review): case labels (tcc_unary/tcc_binary presumably) and the
   closing braces are omitted from this extract.  */
1457 expr_has_constants (tree expr)
1459   switch (TREE_CODE_CLASS (TREE_CODE (expr)))
/* Unary: constant iff the single operand is a gimple invariant.  */
1462       return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
/* Binary: constant if either operand is a gimple invariant.  */
1465       return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
1466 	|| is_gimple_min_invariant (TREE_OPERAND (expr, 1));
1467       /* Constants inside reference ops are rarely interesting, but
1468 	 it can take a lot of looking to find them.  */
1470     case tcc_declaration:
1473       return is_gimple_min_invariant (expr);
1478 /* Replace SSA_NAMES in expr with their value numbers, and return the
1480    This is performed in place.  */
/* NOTE(review): case labels and the default/return path are omitted
   from this extract; only the operand-substitution lines are visible.  */
1483 valueize_expr (tree expr)
1485   switch (TREE_CODE_CLASS (TREE_CODE (expr)))
/* Unary case: substitute operand 0 unless its value is still TOP.  */
1488       if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
1489 	  && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
1490 	TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
/* Binary case: substitute both operands, each guarded against TOP.  */
1493       if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
1494 	  && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
1495 	TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
1496       if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
1497 	  && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
1498 	TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
1506 /* Simplify the binary expression RHS, and return the result if
/* (Comment truncated in this extract; presumably "... if simplified to a
   valid gimple expression, NULL_TREE otherwise" — confirm against full
   source.)  */
1510 simplify_binary_expression (tree stmt, tree rhs)
1512   tree result = NULL_TREE;
1513   tree op0 = TREE_OPERAND (rhs, 0);
1514   tree op1 = TREE_OPERAND (rhs, 1);
1516   /* This will not catch every single case we could combine, but will
1517      catch those with constants.  The goal here is to simultaneously
1518      combine constants between expressions, but avoid infinite
1519      expansion of expressions during simplification.  */
1520   if (TREE_CODE (op0) == SSA_NAME)
/* Prefer the recorded constant-bearing expression; otherwise fall back
   to the plain value number (skipping TOP and self-valued names).  */
1522       if (VN_INFO (op0)->has_constants)
1523 	op0 = valueize_expr (VN_INFO (op0)->expr);
1524       else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
1525 	op0 = SSA_VAL (op0);
1528   if (TREE_CODE (op1) == SSA_NAME)
1530       if (VN_INFO (op1)->has_constants)
1531 	op1 = valueize_expr (VN_INFO (op1)->expr);
1532       else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
1533 	op1 = SSA_VAL (op1);
1536   /* Avoid folding if nothing changed.  */
1537   if (op0 == TREE_OPERAND (rhs, 0)
1538       && op1 == TREE_OPERAND (rhs, 1))
/* Defer overflow warnings: folding may be speculative here and the
   result might be discarded.  */
1541   fold_defer_overflow_warnings ();
1543   result = fold_binary (TREE_CODE (rhs), TREE_TYPE (rhs), op0, op1);
1545   fold_undefer_overflow_warnings (result && valid_gimple_expression_p (result),
1548   /* Make sure result is not a complex expression consisting
1549      of operators of operators (IE (a + b) + (a + c))
1550      Otherwise, we will end up with unbounded expressions if
1551      fold does anything at all.  */
1552   if (result && valid_gimple_expression_p (result))
1558 /* Simplify the unary expression RHS, and return the result if
/* (Comment truncated in this extract; presumably returns the folded
   expression or NULL_TREE.)  */
1562 simplify_unary_expression (tree rhs)
1564   tree result = NULL_TREE;
1565   tree op0 = TREE_OPERAND (rhs, 0);
/* Only SSA operands can be valueized; anything else is left alone.  */
1567   if (TREE_CODE (op0) != SSA_NAME)
1570   if (VN_INFO (op0)->has_constants)
1571     op0 = valueize_expr (VN_INFO (op0)->expr);
1572   else if (TREE_CODE (rhs) == NOP_EXPR
1573 	   || TREE_CODE (rhs) == CONVERT_EXPR
1574 	   || TREE_CODE (rhs) == REALPART_EXPR
1575 	   || TREE_CODE (rhs) == IMAGPART_EXPR
1576 	   || TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
1578       /* We want to do tree-combining on conversion-like expressions.
1579          Make sure we feed only SSA_NAMEs or constants to fold though.  */
1580       tree tem = valueize_expr (VN_INFO (op0)->expr);
1581       if (UNARY_CLASS_P (tem)
1582 	  || BINARY_CLASS_P (tem)
1583 	  || TREE_CODE (tem) == VIEW_CONVERT_EXPR
1584 	  || TREE_CODE (tem) == SSA_NAME
1585 	  || is_gimple_min_invariant (tem))
1589   /* Avoid folding if nothing changed, but remember the expression.  */
1590   if (op0 == TREE_OPERAND (rhs, 0))
1593   result = fold_unary (TREE_CODE (rhs), TREE_TYPE (rhs), op0);
/* Strip conversions that do not change semantics before validating.  */
1596       STRIP_USELESS_TYPE_CONVERSION (result);
1597       if (valid_gimple_expression_p (result))
1604 /* Try to simplify RHS using equivalences and constant folding.  */
/* NOTE(review): several case labels and fallthrough paths are omitted
   from this extract.  */
1607 try_to_simplify (tree stmt, tree rhs)
1611   /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
1612      in this case, there is no point in doing extra work.  */
1613   if (TREE_CODE (rhs) == SSA_NAME)
1616   switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
1618     case tcc_declaration:
/* A declaration may have a known constant initializer.  */
1619       tem = get_symbol_constant_value (rhs);
1625       /* Do not do full-blown reference lookup here, but simplify
1626 	 reads from constant aggregates.  */
1627       tem = fold_const_aggregate_ref (rhs);
1631       /* Fallthrough for some codes that can operate on registers.  */
1632       if (!(TREE_CODE (rhs) == REALPART_EXPR
1633 	    || TREE_CODE (rhs) == IMAGPART_EXPR
1634 	    || TREE_CODE (rhs) == VIEW_CONVERT_EXPR))
1636       /* We could do a little more with unary ops, if they expand
1637 	 into binary ops, but it's debatable whether it is worth it.  */
1639       return simplify_unary_expression (rhs);
1641     case tcc_comparison:
1643       return simplify_binary_expression (stmt, rhs);
1652 /* Visit and value number USE, return true if the value number
/* (Comment truncated in this extract: returns true when the value number
   of USE changed.)
   NOTE(review): this extract omits braces, else-arms, `break`s, some case
   labels and local declarations; comments annotate only visible lines.  */
1656 visit_use (tree use)
1658   bool changed = false;
1659   tree stmt = SSA_NAME_DEF_STMT (use);
1662   VN_INFO (use)->use_processed = true;
1664   gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
1665   if (dump_file && (dump_flags & TDF_DETAILS)
1666       && !IS_EMPTY_STMT (stmt))
1668       fprintf (dump_file, "Value numbering ");
1669       print_generic_expr (dump_file, use, 0);
1670       fprintf (dump_file, " stmt = ");
1671       print_generic_stmt (dump_file, stmt, 0);
1674   /* RETURN_EXPR may have an embedded MODIFY_STMT.  */
1675   if (TREE_CODE (stmt) == RETURN_EXPR
1676       && TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT)
1677     stmt = TREE_OPERAND (stmt, 0);
1679   ann = stmt_ann (stmt);
1681   /* Handle uninitialized uses.  */
1682   if (IS_EMPTY_STMT (stmt))
1684       changed = set_ssa_val_to (use, use);
1688       if (TREE_CODE (stmt) == PHI_NODE)
1690 	  changed = visit_phi (stmt);
/* Volatile or throwing statements cannot be value numbered safely;
   everything they define becomes varying.  */
1692       else if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
1693 	       || (ann && ann->has_volatile_ops)
1694 	       || tree_could_throw_p (stmt))
1696 	  changed = defs_to_varying (stmt);
1700 	  tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
1701 	  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
1704 	  STRIP_USELESS_TYPE_CONVERSION (rhs);
1706 	  /* Shortcut for copies. Simplifying copies is pointless,
1707 	     since we copy the expression and value they represent.  */
1708 	  if (TREE_CODE (rhs) == SSA_NAME && TREE_CODE (lhs) == SSA_NAME)
1710 	      changed = visit_copy (lhs, rhs);
1713 	  simplified = try_to_simplify (stmt, rhs);
1714 	  if (simplified && simplified != rhs)
1716 	      if (dump_file && (dump_flags & TDF_DETAILS))
1718 		  fprintf (dump_file, "RHS ");
1719 		  print_generic_expr (dump_file, rhs, 0);
1720 		  fprintf (dump_file, " simplified to ");
1721 		  print_generic_expr (dump_file, simplified, 0);
1722 		  if (TREE_CODE (lhs) == SSA_NAME)
1723 		    fprintf (dump_file, " has constants %d\n",
1724 			     expr_has_constants (simplified));
1726 		    fprintf (dump_file, "\n");
1729 	  /* Setting value numbers to constants will occasionally
1730 	     screw up phi congruence because constants are not
1731 	     uniquely associated with a single ssa name that can be
1733 	  if (simplified && is_gimple_min_invariant (simplified)
1734 	      && TREE_CODE (lhs) == SSA_NAME
1735 	      && simplified != rhs)
1737 	      VN_INFO (lhs)->expr = simplified;
1738 	      VN_INFO (lhs)->has_constants = true;
1739 	      changed = set_ssa_val_to (lhs, simplified);
/* Simplified to another SSA name: treat as a copy.  */
1742 	  else if (simplified && TREE_CODE (simplified) == SSA_NAME
1743 		   && TREE_CODE (lhs) == SSA_NAME)
1745 	      changed = visit_copy (lhs, simplified);
1748 	  else if (simplified)
1750 	      if (TREE_CODE (lhs) == SSA_NAME)
1752 		  VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
1753 		  /* We have to unshare the expression or else
1754 		     valuizing may change the IL stream.  */
1755 		  VN_INFO (lhs)->expr = unshare_expr (simplified);
1759 	  else if (expr_has_constants (rhs) && TREE_CODE (lhs) == SSA_NAME)
1761 	      VN_INFO (lhs)->has_constants = true;
1762 	      VN_INFO (lhs)->expr = unshare_expr (rhs);
1764 	  else if (TREE_CODE (lhs) == SSA_NAME)
1766 	      /* We reset expr and constantness here because we may
1767 		 have been value numbering optimistically, and
1768 		 iterating. They may become non-constant in this case,
1769 		 even if they were optimistically constant.  */
1771 	      VN_INFO (lhs)->has_constants = false;
1772 	      VN_INFO (lhs)->expr = lhs;
1775 	  if (TREE_CODE (lhs) == SSA_NAME
1776 	      /* We can substitute SSA_NAMEs that are live over
1777 		 abnormal edges with their constant value.  */
1778 	      && !is_gimple_min_invariant (rhs)
1779 	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
1780 	    changed = defs_to_varying (stmt);
1781 	  else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
1783 	      changed = visit_reference_op_store (lhs, rhs, stmt);
1785 	  else if (TREE_CODE (lhs) == SSA_NAME)
1787 	      if (is_gimple_min_invariant (rhs))
1789 		  VN_INFO (lhs)->has_constants = true;
1790 		  VN_INFO (lhs)->expr = rhs;
1791 		  changed = set_ssa_val_to (lhs, rhs);
/* Dispatch on the RHS code class to the specific visitor.  */
1795 		  switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
1798 		      changed = visit_unary_op (lhs, rhs);
1801 		      changed = visit_binary_op (lhs, rhs);
1803 		      /* If tcc_vl_expr ever encompasses more than
1804 			 CALL_EXPR, this will need to be changed.  */
/* Only pure/const calls can be value numbered as loads; other calls
   make their defs varying.  */
1806 		      if (call_expr_flags (rhs) & (ECF_PURE | ECF_CONST))
1807 			changed = visit_reference_op_load (lhs, rhs, stmt);
1809 			changed = defs_to_varying (stmt);
1811 		    case tcc_declaration:
1813 		      changed = visit_reference_op_load (lhs, rhs, stmt);
1815 		    case tcc_expression:
1816 		      if (TREE_CODE (rhs) == ADDR_EXPR)
1818 			  changed = visit_unary_op (lhs, rhs);
1823 		      changed = defs_to_varying (stmt);
1829 	    changed = defs_to_varying (stmt);
1836 /* Compare two operands by reverse postorder index */
/* qsort comparator for SSA names: orders by the RPO number of the
   defining statement's block, with ties broken so empty (default-def)
   statements sort first, then PHIs, then by statement uid.
   NOTE(review): some return statements between the visible conditions
   are omitted from this extract.  */
1839 compare_ops (const void *pa, const void *pb)
1841   const tree opa = *((const tree *)pa);
1842   const tree opb = *((const tree *)pb);
1843   tree opstmta = SSA_NAME_DEF_STMT (opa);
1844   tree opstmtb = SSA_NAME_DEF_STMT (opb);
1848   if (IS_EMPTY_STMT (opstmta) && IS_EMPTY_STMT (opstmtb))
1850   else if (IS_EMPTY_STMT (opstmta))
1852   else if (IS_EMPTY_STMT (opstmtb))
1855   bba = bb_for_stmt (opstmta);
1856   bbb = bb_for_stmt (opstmtb);
/* Same block: PHIs come before real statements; otherwise use uids.  */
1867       if (TREE_CODE (opstmta) == PHI_NODE && TREE_CODE (opstmtb) == PHI_NODE)
1869       else if (TREE_CODE (opstmta) == PHI_NODE)
1871       else if (TREE_CODE (opstmtb) == PHI_NODE)
1873       return stmt_ann (opstmta)->uid - stmt_ann (opstmtb)->uid;
1875   return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
1878 /* Sort an array containing members of a strongly connected component
1879    SCC so that the members are ordered by RPO number.
1880    This means that when the sort is complete, iterating through the
1881    array will give you the members in RPO order.  */
1884 sort_scc (VEC (tree, heap) *scc)
/* Sort the vector in place using the RPO-based comparator above.
   NOTE(review): the element-size argument to qsort is not visible in
   this extract.  */
1886   qsort (VEC_address (tree, scc),
1887 	 VEC_length (tree, scc),
1895 process_scc (VEC (tree, heap) *scc)
1897 /* If the SCC has a single member, just visit it. */
1899 if (VEC_length (tree, scc) == 1)
1901 tree use = VEC_index (tree, scc, 0);
1902 if (!VN_INFO (use)->use_processed)
1909 unsigned int iterations = 0;
1910 bool changed = true;
1912 /* Iterate over the SCC with the optimistic table until it stops
1914 current_info = optimistic_info;
1919 htab_empty (optimistic_info->nary);
1920 htab_empty (optimistic_info->phis);
1921 htab_empty (optimistic_info->references);
1922 obstack_free (&optimistic_info->nary_obstack, NULL);
1923 gcc_obstack_init (&optimistic_info->nary_obstack);
1924 empty_alloc_pool (optimistic_info->phis_pool);
1925 empty_alloc_pool (optimistic_info->references_pool);
1926 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1927 changed |= visit_use (var);
1930 if (dump_file && (dump_flags & TDF_STATS))
1931 fprintf (dump_file, "Processing SCC required %d iterations\n",
1934 /* Finally, visit the SCC once using the valid table. */
1935 current_info = valid_info;
1936 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1941 /* Depth first search on NAME to discover and process SCC's in the SSA
1943    Execution of this algorithm relies on the fact that the SCC's are
1944    popped off the stack in topological order.
1945    Returns true if successful, false if we stopped processing SCC's due
1946    to ressource constraints.  */
/* NOTE(review): the function signature and several local declarations
   (defstmt, usep, iter, x) are not visible in this extract; this is
   Tarjan-style SCC discovery driven by SSA use-def edges.  */
1956   VN_INFO (name)->dfsnum = next_dfs_num++;
1957   VN_INFO (name)->visited = true;
1958   VN_INFO (name)->low = VN_INFO (name)->dfsnum;
1960   VEC_safe_push (tree, heap, sccstack, name);
1961   VN_INFO (name)->on_sccstack = true;
1962   defstmt = SSA_NAME_DEF_STMT (name);
1964   /* Recursively DFS on our operands, looking for SCC's.  */
1965   if (!IS_EMPTY_STMT (defstmt))
1967       FOR_EACH_PHI_OR_STMT_USE (usep, SSA_NAME_DEF_STMT (name), iter,
1970 	  tree use = USE_FROM_PTR (usep);
1972 	  /* Since we handle phi nodes, we will sometimes get
1973 	     invariants in the use expression.  */
1974 	  if (TREE_CODE (use) != SSA_NAME)
1977 	  if (! (VN_INFO (use)->visited))
/* Tree edge: propagate the child's low-link upward.  */
1981 	      VN_INFO (name)->low = MIN (VN_INFO (name)->low,
1982 					 VN_INFO (use)->low);
/* Back edge to a node still on the stack: update low-link with the
   target's dfs number.  */
1984 	  if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
1985 	      && VN_INFO (use)->on_sccstack)
1987 	      VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
1988 					 VN_INFO (name)->low);
1993   /* See if we found an SCC.  */
1994   if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
1996       VEC (tree, heap) *scc = NULL;
1999       /* Found an SCC, pop the components off the SCC stack and
2003 	  x = VEC_pop (tree, sccstack);
2005 	  VN_INFO (x)->on_sccstack = false;
2006 	  VEC_safe_push (tree, heap, scc, x);
2007 	} while (x != name);
2009       /* Bail out of SCCVN in case a SCC turns out to be incredibly large.  */
2010       if (VEC_length (tree, scc)
2011 	  > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
2014 	    fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
2015 		     "SCC size %u exceeding %u\n", VEC_length (tree, scc),
2016 		     (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
2020       if (VEC_length (tree, scc) > 1)
2023       if (dump_file && (dump_flags & TDF_DETAILS))
2024 	print_scc (dump_file, scc);
2028       VEC_free (tree, heap, scc);
2034 /* Allocate a value number table.  */
/* Creates the three hash tables (phis, nary ops, references) plus the
   obstack/alloc-pools that back their entries.
   NOTE(review): the trailing arguments of the htab_create and
   create_alloc_pool calls are not visible in this extract.  */
2037 allocate_vn_table (vn_tables_t table)
2039   table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
2040   table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
2041   table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
2044   gcc_obstack_init (&table->nary_obstack);
2045   table->phis_pool = create_alloc_pool ("VN phis",
2046 					sizeof (struct vn_phi_s),
2048   table->references_pool = create_alloc_pool ("VN references",
2049 					      sizeof (struct vn_reference_s),
2053 /* Free a value number table.  */
/* Releases everything allocate_vn_table created: the three hash tables,
   the nary obstack, and both alloc pools.  */
2056 free_vn_table (vn_tables_t table)
2058   htab_delete (table->phis);
2059   htab_delete (table->nary);
2060   htab_delete (table->references);
2061   obstack_free (&table->nary_obstack, NULL);
2062   free_alloc_pool (table->phis_pool);
2063   free_alloc_pool (table->references_pool);
/* NOTE(review): the signature of this initialization function (presumably
   init_scc_vn) and several declarations/loop headers are not visible in
   this extract.  It sets up per-SSA-name aux info, RPO numbers, statement
   uids, and the valid/optimistic value-number tables.  */
2071   int *rpo_numbers_temp;
2075   calculate_dominance_info (CDI_DOMINATORS);
2079   vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
2080   /* VEC_alloc doesn't actually grow it to the right size, it just
2081      preallocates the space to do so.  */
2082   VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
2083   gcc_obstack_init (&vn_ssa_aux_obstack);
2085   shared_lookup_phiargs = NULL;
2086   shared_lookup_vops = NULL;
2087   shared_lookup_references = NULL;
2088   rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
2089   rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
2090   pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
2092   /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
2093      the i'th block in RPO order is bb.  We want to map bb's to RPO
2094      numbers, so we need to rearrange this array.  */
2095   for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
2096     rpo_numbers[rpo_numbers_temp[j]] = j;
2098   XDELETE (rpo_numbers_temp);
/* Sentinel value meaning "optimistically equivalent to everything".  */
2100   VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
2102   /* Create the VN_INFO structures, and initialize value numbers to
2104   for (i = 0; i < num_ssa_names; i++)
2106       tree name = ssa_name (i);
2109 	  VN_INFO_GET (name)->valnum = VN_TOP;
2110 	  VN_INFO (name)->expr = name;
/* Assign monotonically increasing uids to statements so compare_ops can
   order names defined in the same block.  */
2116       block_stmt_iterator bsi;
2117       for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2119 	  tree stmt = bsi_stmt (bsi);
2120 	  stmt_ann (stmt)->uid = id++;
2124   /* Create the valid and optimistic value numbering tables.  */
2125   valid_info = XCNEW (struct vn_tables_s);
2126   allocate_vn_table (valid_info);
2127   optimistic_info = XCNEW (struct vn_tables_s);
2128   allocate_vn_table (optimistic_info);
/* Allocate a fresh table for use by PRE and make it the current
   value-numbering table.  */
2133 switch_to_PRE_table (void)
2135   pre_info = XCNEW (struct vn_tables_s);
2136   allocate_vn_table (pre_info);
2137   current_info = pre_info;
/* NOTE(review): the signature of this teardown function (presumably
   free_scc_vn) and some loop/conditional context are not visible in this
   extract.  It frees all SCCVN state allocated by initialization.  */
2145   VEC_free (tree, heap, shared_lookup_phiargs);
2146   VEC_free (tree, gc, shared_lookup_vops);
2147   VEC_free (vn_reference_op_s, heap, shared_lookup_references);
2148   XDELETEVEC (rpo_numbers);
2150   for (i = 0; i < num_ssa_names; i++)
2152       tree name = ssa_name (i);
/* Clear any VALUE_HANDLEs left in SSA_NAME_VALUE slots.  */
2154 	  && SSA_NAME_VALUE (name)
2155 	  && TREE_CODE (SSA_NAME_VALUE (name)) == VALUE_HANDLE)
2156 	SSA_NAME_VALUE (name) = NULL;
/* Names created by insertion during VN are no longer needed.  */
2158 	  && VN_INFO (name)->needs_insertion)
2159 	release_ssa_name (name);
2161   obstack_free (&vn_ssa_aux_obstack, NULL);
2162   VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
2164   VEC_free (tree, heap, sccstack);
2165   free_vn_table (valid_info);
2166   XDELETE (valid_info);
2167   free_vn_table (optimistic_info);
2168   XDELETE (optimistic_info);
2171       free_vn_table (pre_info);
2176 /* Do SCCVN. Returns true if it finished, false if we bailed out
2177 due to ressource constraints. */
2180 run_scc_vn (bool may_insert_arg)
2185 may_insert = may_insert_arg;
2188 current_info = valid_info;
2190 for (param = DECL_ARGUMENTS (current_function_decl);
2192 param = TREE_CHAIN (param))
2194 if (gimple_default_def (cfun, param) != NULL)
2196 tree def = gimple_default_def (cfun, param);
2197 SSA_VAL (def) = def;
2201 for (i = 1; i < num_ssa_names; ++i)
2203 tree name = ssa_name (i);
2205 && VN_INFO (name)->visited == false
2206 && !has_zero_uses (name))
2215 if (dump_file && (dump_flags & TDF_DETAILS))
2217 fprintf (dump_file, "Value numbers:\n");
2218 for (i = 0; i < num_ssa_names; i++)
2220 tree name = ssa_name (i);
2221 if (name && VN_INFO (name)->visited
2222 && (SSA_VAL (name) != name
2223 || is_gimple_min_invariant (VN_INFO (name)->expr)))
2225 print_generic_expr (dump_file, name, 0);
2226 fprintf (dump_file, " = ");
2227 if (is_gimple_min_invariant (VN_INFO (name)->expr))
2228 print_generic_expr (dump_file, VN_INFO (name)->expr, 0);
2230 print_generic_expr (dump_file, SSA_VAL (name), 0);
2231 fprintf (dump_file, "\n");