1 /* SCC value numbering for trees
2 Copyright (C) 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dan@dberlin.org>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "basic-block.h"
29 #include "diagnostic.h"
30 #include "tree-inline.h"
31 #include "tree-flow.h"
32 #include "tree-gimple.h"
33 #include "tree-dump.h"
37 #include "tree-iterator.h"
39 #include "alloc-pool.h"
40 #include "tree-pass.h"
43 #include "langhooks.h"
46 #include "tree-ssa-propagate.h"
47 #include "tree-ssa-sccvn.h"
49 /* This algorithm is based on the SCC algorithm presented by Keith
50 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
51 (http://citeseer.ist.psu.edu/41805.html). In
52 straight line code, it is equivalent to a regular hash based value
53 numbering that is performed in reverse postorder.
55 For code with cycles, there are two alternatives, both of which
56 require keeping the hashtables separate from the actual list of
57 value numbers for SSA names.
59 1. Iterate value numbering in an RPO walk of the blocks, removing
60 all the entries from the hashtable after each iteration (but
61 keeping the SSA name->value number mapping between iterations).
62 Iterate until it does not change.
64 2. Perform value numbering as part of an SCC walk on the SSA graph,
65 iterating only the cycles in the SSA graph until they do not change
66 (using a separate, optimistic hashtable for value numbering the SCC
69 The second is not just faster in practice (because most SSA graph
70 cycles do not involve all the variables in the graph), it also has
73 One of these nice properties is that when we pop an SCC off the
74 stack, we are guaranteed to have processed all the operands coming from
75 *outside of that SCC*, so we do not need to do anything special to
76 ensure they have value numbers.
78 Another nice property is that the SCC walk is done as part of a DFS
79 of the SSA graph, which makes it easy to perform combining and
80 simplifying operations at the same time.
82 The code below is deliberately written in a way that makes it easy
83 to separate the SCC walk from the other work it does.
85 In order to propagate constants through the code, we track which
86 expressions contain constants, and use those while folding. In
87 theory, we could also track expressions whose value numbers are
88 replaced, in case we end up folding based on expression
91 In order to value number memory, we assign value numbers to vuses.
92 This enables us to note that, for example, stores to the same
93 address of the same value from the same starting memory states are
97 1. We can iterate only the changing portions of the SCC's, but
98 I have not seen an SCC big enough for this to be a win.
99 2. If you differentiate between phi nodes for loops and phi nodes
100 for if-then-else, you can properly consider phi nodes in different
101 blocks for equivalence.
102 3. We could value number vuses in more cases, particularly, whole
106 /* The set of hashtables and alloc_pool's for their items. */
/* NOTE(review): this is a numbered listing with missing lines (the
   embedded numbers are non-contiguous); the hashtable members of
   vn_tables_s and several struct bodies are not visible.  Code kept
   byte-identical. */
108 typedef struct vn_tables_s
/* Storage backing the table entries: nary entries live on an obstack,
   phi and reference entries in alloc_pools, so a whole table can be
   freed at once. */
113   struct obstack nary_obstack;
114   alloc_pool phis_pool;
115   alloc_pool references_pool;
118 /* Nary operations in the hashtable consist of length operands, an
119 opcode, and a type. Result is the value number of the operation,
120 and hashcode is stored to avoid having to calculate it
/* Hashtable key for an n-ary (unary/binary/...) operation.  Opcode is
   packed into 16 bits alongside the operand count. */
123 typedef struct vn_nary_op_s
125   ENUM_BITFIELD(tree_code) opcode : 16;
126   unsigned length : 16;
132 typedef const struct vn_nary_op_s *const_vn_nary_op_t;
134 /* Phi nodes in the hashtable consist of their non-VN_TOP phi
135 arguments, and the basic block the phi is in. Result is the value
136 number of the operation, and hashcode is stored to avoid having to
137 calculate it repeatedly. Phi nodes not in the same block are never
138 considered equivalent. */
/* Hashtable key for a PHI node; phiargs holds the (valueized)
   arguments in edge order. */
140 typedef struct vn_phi_s
142   VEC (tree, heap) *phiargs;
147 typedef const struct vn_phi_s *const_vn_phi_t;
149 /* Reference operands only exist in reference operations structures.
150 They consist of an opcode, type, and some number of operands. For
151 a given opcode, some, all, or none of the operands may be used.
152 The operands are there to store the information that makes up the
153 portion of the addressing calculation that opcode performs. */
/* One step of an addressing calculation (COMPONENT_REF, ARRAY_REF,
   INDIRECT_REF, ...).  op0/op1 fields presumably follow; they are not
   visible in this listing. */
155 typedef struct vn_reference_op_struct
157   enum tree_code opcode;
162 typedef vn_reference_op_s *vn_reference_op_t;
163 typedef const vn_reference_op_s *const_vn_reference_op_t;
165 DEF_VEC_O(vn_reference_op_s);
166 DEF_VEC_ALLOC_O(vn_reference_op_s, heap);
168 /* A reference operation in the hashtable is representation as a
169 collection of vuses, representing the memory state at the time of
170 the operation, and a collection of operands that make up the
171 addressing calculation. If two vn_reference_t's have the same set
172 of operands, they access the same memory location. We also store
173 the resulting value number, and the hashcode. The vuses are
174 always stored in order sorted by ssa name version. */
/* Hashtable key for a memory load/store/call: memory state (vuses)
   plus the flattened address operands. */
176 typedef struct vn_reference_s
178   VEC (tree, gc) *vuses;
179   VEC (vn_reference_op_s, heap) *operands;
183 typedef const struct vn_reference_s *const_vn_reference_t;
185 /* Valid hashtables storing information we have proven to be
/* Facts proven correct; survives optimistic iteration. */
188 static vn_tables_t valid_info;
190 /* Optimistic hashtables storing information we are making assumptions about
191 during iterations. */
/* Cleared and refilled on each SCC iteration. */
193 static vn_tables_t optimistic_info;
195 /* PRE hashtables storing information about mapping from expressions to
198 static vn_tables_t pre_info;
200 /* Pointer to the set of hashtables that is currently being used.
201 Should always point to either the optimistic_info, or the
/* All lookup/insert routines below go through this indirection. */
204 static vn_tables_t current_info;
207 /* Reverse post order index for each basic block. */
209 static int *rpo_numbers;
/* Shorthand for an SSA name's current value number. */
211 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
213 /* This represents the top of the VN lattice, which is the universal
218 /* Next DFS number and the stack for strongly connected component
/* Tarjan-style SCC detection state (DFS numbering + stack). */
221 static unsigned int next_dfs_num;
222 static VEC (tree, heap) *sccstack;
/* True when expression insertion (FRE) is allowed -- see the
   union-member handling in copy_reference_ops_from_ref. */
224 static bool may_insert;
227 DEF_VEC_P(vn_ssa_aux_t);
228 DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);
230 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
231 are allocated on an obstack for locality reasons, and to free them
232 without looping over the VEC. */
234 static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
235 static struct obstack vn_ssa_aux_obstack;
237 /* Return the value numbering information for a given SSA name. */
/* NOTE(review): signature and braces of VN_INFO are missing from this
   listing; the body indexes vn_ssa_aux_table by SSA version. */
242 return VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
243 SSA_NAME_VERSION (name));
246 /* Set the value numbering info for a given SSA name to a given
/* Overwrite the aux slot for NAME with VALUE. */
250 VN_INFO_SET (tree name, vn_ssa_aux_t value)
252 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
253 SSA_NAME_VERSION (name), value);
256 /* Initialize the value numbering info for a given SSA name.
257 This should be called just once for every SSA name. */
260 VN_INFO_GET (tree name)
262 vn_ssa_aux_t newinfo;
/* Allocate on the shared obstack and zero-fill so all lattice fields
   start out cleared. */
264 newinfo = obstack_alloc (&vn_ssa_aux_obstack, sizeof (struct vn_ssa_aux));
265 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
/* Grow the table on demand; versions are not guaranteed dense. */
266 if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
267 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
268 SSA_NAME_VERSION (name) + 1);
269 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
270 SSA_NAME_VERSION (name), newinfo);
275 /* Free a phi operation structure VP. */
/* NOTE(review): the free_phi signature is not visible in this listing;
   only the arg-vector release is shown. */
281 VEC_free (tree, heap, phi->phiargs);
284 /* Free a reference operation structure VP. */
/* htab delete callback: releases the operand vector of a
   vn_reference_t (the entry itself lives in an alloc_pool). */
287 free_reference (void *vp)
289 vn_reference_t vr = vp;
290 VEC_free (vn_reference_op_s, heap, vr->operands);
293 /* Compare two reference operands P1 and P2 for equality. return true if
294 they are equal, and false otherwise. */
/* Equality requires same opcode, identical type pointer, and
   structurally equal op0/op1. */
297 vn_reference_op_eq (const void *p1, const void *p2)
299 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
300 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
301 return vro1->opcode == vro2->opcode
302 && vro1->type == vro2->type
303 && expressions_equal_p (vro1->op0, vro2->op0)
304 && expressions_equal_p (vro1->op1, vro2->op1);
307 /* Compute the hash for a reference operand VRO1 */
/* Combines op0 and op1 hashes, seeding both with the opcode so that
   different opcodes over the same operands hash differently. */
310 vn_reference_op_compute_hash (const vn_reference_op_t vro1)
312 return iterative_hash_expr (vro1->op0, vro1->opcode)
313 + iterative_hash_expr (vro1->op1, vro1->opcode);
316 /* Return the hashcode for a given reference operation P1. */
/* htab hash callback: the hash is precomputed and cached. */
319 vn_reference_hash (const void *p1)
321 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
322 return vr1->hashcode;
325 /* Compute a hash for the reference operation VR1 and return it. */
327 static inline hashval_t
328 vn_reference_compute_hash (const vn_reference_t vr1)
330 hashval_t result = 0;
333 vn_reference_op_t vro;
/* Fold in the memory state (vuses) first, then each address
   operand.  Addition keeps the hash order-insensitive per group. */
335 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
336 result += iterative_hash_expr (v, 0);
337 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
338 result += vn_reference_op_compute_hash (vro);
343 /* Return true if reference operations P1 and P2 are equivalent. This
344 means they have the same set of operands and vuses. */
347 vn_reference_eq (const void *p1, const void *p2)
351 vn_reference_op_t vro;
353 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
354 const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
/* Identical vectors => trivially equal (shared-vector fast path). */
356 if (vr1->vuses == vr2->vuses
357 && vr1->operands == vr2->operands)
360 /* Impossible for them to be equivalent if they have different
362 if (VEC_length (tree, vr1->vuses) != VEC_length (tree, vr2->vuses))
365 /* We require that address operands be canonicalized in a way that
366 two memory references will have the same operands if they are
368 if (VEC_length (vn_reference_op_s, vr1->operands)
369 != VEC_length (vn_reference_op_s, vr2->operands))
372 /* The memory state is more often different than the address of the
373 store/load, so check it first. */
374 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
376 if (VEC_index (tree, vr2->vuses, i) != v)
/* Vuses matched (they are kept sorted, so positional compare is
   sufficient); now compare the address operands element-wise. */
380 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
382 if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
389 /* Place the vuses from STMT into *result */
/* Reserves exact capacity first so VEC_quick_push is safe. */
392 vuses_to_vec (tree stmt, VEC (tree, gc) **result)
400 VEC_reserve_exact (tree, gc, *result,
401 num_ssa_operands (stmt, SSA_OP_VIRTUAL_USES));
403 FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VIRTUAL_USES)
404 VEC_quick_push (tree, *result, vuse);
408 /* Copy the VUSE names in STMT into a vector, and return
/* Freshly allocated (caller-owned) copy of STMT's vuses. */
412 copy_vuses_from_stmt (tree stmt)
414 VEC (tree, gc) *vuses = NULL;
416 vuses_to_vec (stmt, &vuses);
421 /* Place the vdefs from STMT into *result */
424 vdefs_to_vec (tree stmt, VEC (tree, gc) **result)
432 *result = VEC_alloc (tree, gc, num_ssa_operands (stmt, SSA_OP_VIRTUAL_DEFS));
434 FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, iter, SSA_OP_VIRTUAL_DEFS)
435 VEC_quick_push (tree, *result, vdef);
438 /* Copy the names of vdef results in STMT into a vector, and return
441 static VEC (tree, gc) *
442 copy_vdefs_from_stmt (tree stmt)
444 VEC (tree, gc) *vdefs = NULL;
446 vdefs_to_vec (stmt, &vdefs);
451 /* Place for shared_v{uses/defs}_from_stmt to shove vuses/vdefs. */
452 static VEC (tree, gc) *shared_lookup_vops;
454 /* Copy the virtual uses from STMT into SHARED_LOOKUP_VOPS.
455 This function will overwrite the current SHARED_LOOKUP_VOPS
/* Returns the shared vector -- valid only until the next call; use
   copy_vuses_from_stmt when the vector must be retained. */
459 shared_vuses_from_stmt (tree stmt)
461 VEC_truncate (tree, shared_lookup_vops, 0);
462 vuses_to_vec (stmt, &shared_lookup_vops);
464 return shared_lookup_vops;
467 /* Copy the operations present in load/store/call REF into RESULT, a vector of
468 vn_reference_op_s's. */
/* NOTE(review): this listing is missing many lines of this function
   (loop header, several case labels, closing braces); code kept
   verbatim. */
471 copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
473 /* Calls are different from all other reference operations. */
474 if (TREE_CODE (ref) == CALL_EXPR)
476 vn_reference_op_s temp;
478 call_expr_arg_iterator iter;
481 /* Copy the call_expr opcode, type, function being called, and
/* First op: the CALL_EXPR itself (type + opcode only). */
483 memset (&temp, 0, sizeof (temp));
484 temp.type = TREE_TYPE (ref);
485 temp.opcode = CALL_EXPR;
486 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
/* Second op: the callee -- the fndecl if known, else the call's
   function expression. */
488 callfn = get_callee_fndecl (ref);
490 callfn = CALL_EXPR_FN (ref);
491 temp.type = TREE_TYPE (callfn);
492 temp.opcode = TREE_CODE (callfn);
494 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
/* One op per call argument, in order. */
496 FOR_EACH_CALL_EXPR_ARG (callarg, iter, ref)
498 memset (&temp, 0, sizeof (temp));
499 temp.type = TREE_TYPE (callarg);
500 temp.opcode = TREE_CODE (callarg);
502 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
507 /* For non-calls, store the information that makes up the address. */
/* Walks REF outermost-to-innermost, emitting one vn_reference_op_s
   per level of the reference chain. */
511 vn_reference_op_s temp;
513 memset (&temp, 0, sizeof (temp));
514 temp.type = TREE_TYPE (ref);
515 temp.opcode = TREE_CODE (ref);
519 case ALIGN_INDIRECT_REF:
520 case MISALIGNED_INDIRECT_REF:
522 /* The only operand is the address, which gets its own
523 vn_reference_op_s structure. */
526 /* Record bits and position. */
527 temp.op0 = TREE_OPERAND (ref, 1);
528 temp.op1 = TREE_OPERAND (ref, 2);
531 /* If this is a reference to a union member, record the union
532 member size as operand. Do so only if we are doing
533 expression insertion (during FRE), as PRE currently gets
534 confused with this. */
/* Zero-offset union members of equal size get a canonical form so
   type-punning accesses value-number alike (guarded by may_insert;
   guard line not visible here). */
536 && TREE_CODE (DECL_CONTEXT (TREE_OPERAND (ref, 1))) == UNION_TYPE
537 && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (ref, 1)))
538 && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1))))
540 temp.type = NULL_TREE;
541 temp.op0 = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)));
544 /* Record field as operand. */
545 temp.op0 = TREE_OPERAND (ref, 1);
547 case ARRAY_RANGE_REF:
549 /* Record index as operand. */
550 temp.op0 = TREE_OPERAND (ref, 1);
551 temp.op1 = TREE_OPERAND (ref, 3);
567 /* These are only interesting for their operands, their
568 existence, and their type. They will never be the last
569 ref in the chain of references (IE they require an
570 operand), so we don't have to put anything
571 for op* as it will be handled by the iteration */
574 case VIEW_CONVERT_EXPR:
581 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
/* Descend into the inner reference and iterate. */
583 if (REFERENCE_CLASS_P (ref) || TREE_CODE (ref) == ADDR_EXPR)
584 ref = TREE_OPERAND (ref, 0);
590 /* Create a vector of vn_reference_op_s structures from REF, a
591 REFERENCE_CLASS_P tree. The vector is not shared. */
/* Caller owns (and must free) the returned vector. */
593 static VEC(vn_reference_op_s, heap) *
594 create_reference_ops_from_ref (tree ref)
596 VEC (vn_reference_op_s, heap) *result = NULL;
598 copy_reference_ops_from_ref (ref, &result);
/* Scratch vector reused by shared_reference_ops_from_ref. */
602 static VEC(vn_reference_op_s, heap) *shared_lookup_references;
604 /* Create a vector of vn_reference_op_s structures from REF, a
605 REFERENCE_CLASS_P tree. The vector is shared among all callers of
/* Result valid only until the next call -- do not store it. */
608 static VEC(vn_reference_op_s, heap) *
609 shared_reference_ops_from_ref (tree ref)
613 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
614 copy_reference_ops_from_ref (ref, &shared_lookup_references);
615 return shared_lookup_references;
619 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
620 structures into their value numbers. This is done in-place, and
621 the vector passed in is returned. */
623 static VEC (vn_reference_op_s, heap) *
624 valueize_refs (VEC (vn_reference_op_s, heap) *orig)
626 vn_reference_op_t vro;
/* Replace each SSA_NAME operand with its current value number so
   lookups key on values, not names. */
629 for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
631 if (vro->opcode == SSA_NAME
632 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
633 vro->op0 = SSA_VAL (vro->op0);
639 /* Transform any SSA_NAME's in ORIG, a vector of vuse trees, into
640 their value numbers. This is done in-place, and the vector passed
643 static VEC (tree, gc) *
644 valueize_vuses (VEC (tree, gc) *orig)
646 bool made_replacement = false;
650 for (i = 0; VEC_iterate (tree, orig, i, vuse); i++)
652 if (vuse != SSA_VAL (vuse))
654 made_replacement = true;
655 VEC_replace (tree, orig, i, SSA_VAL (vuse));
/* Vuses must stay sorted by SSA version (see vn_reference_eq);
   the re-sort presumably follows here -- lines not visible. */
659 if (made_replacement && VEC_length (tree, orig) > 1)
665 /* Lookup OP in the current hash table, and return the resulting
666 value number if it exists in the hash table. Return NULL_TREE if
667 it does not exist in the hash table. */
/* Uses the shared ops vector -- no allocation on the lookup path. */
670 vn_reference_lookup (tree op, VEC (tree, gc) *vuses)
673 struct vn_reference_s vr1;
675 vr1.vuses = valueize_vuses (vuses);
676 vr1.operands = valueize_refs (shared_reference_ops_from_ref (op));
677 vr1.hashcode = vn_reference_compute_hash (&vr1);
678 slot = htab_find_slot_with_hash (current_info->references, &vr1, vr1.hashcode,
/* During optimistic iteration, fall back to proven facts. */
680 if (!slot && current_info == optimistic_info)
681 slot = htab_find_slot_with_hash (valid_info->references, &vr1, vr1.hashcode,
686 return ((vn_reference_t)*slot)->result;
689 /* Insert OP into the current hash table with a value number of
/* Allocates the entry from the table's pool and an owned (non-shared)
   operand vector, since the table keeps it. */
693 vn_reference_insert (tree op, tree result, VEC (tree, gc) *vuses)
698 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
700 vr1->vuses = valueize_vuses (vuses);
701 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
702 vr1->hashcode = vn_reference_compute_hash (vr1);
703 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
705 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
708 /* Because we lookup stores using vuses, and value number failures
709 using the vdefs (see visit_reference_op_store for how and why),
710 it's possible that on failure we may try to insert an already
711 inserted store. This is not wrong, there is no ssa name for a
712 store that we could use as a differentiator anyway. Thus, unlike
713 the other lookup functions, you cannot gcc_assert (!*slot)
716 /* But free the old slot in case of a collision. */
718 free_reference (*slot);
723 /* Compute and return the hash value for nary operation VBO1. */
725 static inline hashval_t
726 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
/* Note: this mutates VNO1 -- operands are valueized and commutative
   operands canonically ordered before hashing, so equal-valued
   operations hash (and compare) identically. */
731 for (i = 0; i < vno1->length; ++i)
732 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
733 vno1->op[i] = SSA_VAL (vno1->op[i]);
/* Canonicalize commutative binary ops via tree_swap_operands_p. */
735 if (vno1->length == 2
736 && commutative_tree_code (vno1->opcode)
737 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
739 tree temp = vno1->op[0];
740 vno1->op[0] = vno1->op[1];
744 for (i = 0; i < vno1->length; ++i)
745 hash += iterative_hash_expr (vno1->op[i], vno1->opcode);
750 /* Return the computed hashcode for nary operation P1. */
753 vn_nary_op_hash (const void *p1)
755 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
756 return vno1->hashcode;
759 /* Compare nary operations P1 and P2 and return true if they are
/* Equal iff opcode, type and every operand match. */
763 vn_nary_op_eq (const void *p1, const void *p2)
765 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
766 const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
769 if (vno1->opcode != vno2->opcode
770 || vno1->type != vno2->type)
773 for (i = 0; i < vno1->length; ++i)
774 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
780 /* Lookup OP in the current hash table, and return the resulting
781 value number if it exists in the hash table. Return NULL_TREE if
782 it does not exist in the hash table. */
/* Builds a stack-local key from OP's code, type and operands. */
785 vn_nary_op_lookup (tree op)
788 struct vn_nary_op_s vno1;
791 vno1.opcode = TREE_CODE (op);
792 vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
793 vno1.type = TREE_TYPE (op);
794 for (i = 0; i < vno1.length; ++i)
795 vno1.op[i] = TREE_OPERAND (op, i);
796 vno1.hashcode = vn_nary_op_compute_hash (&vno1);
797 slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
/* Fall back to the valid table during optimistic iteration. */
799 if (!slot && current_info == optimistic_info)
800 slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
804 return ((vn_nary_op_t)*slot)->result;
807 /* Insert OP into the current hash table with a value number of
811 vn_nary_op_insert (tree op, tree result)
813 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
/* Entry is sized for LENGTH operands: the struct declares room for 4
   trees and the unused tail is trimmed off. */
818 vno1 = obstack_alloc (&current_info->nary_obstack,
819 (sizeof (struct vn_nary_op_s)
820 - sizeof (tree) * (4 - length)));
821 vno1->opcode = TREE_CODE (op);
822 vno1->length = length;
823 vno1->type = TREE_TYPE (op);
824 for (i = 0; i < vno1->length; ++i)
825 vno1->op[i] = TREE_OPERAND (op, i);
826 vno1->result = result;
827 vno1->hashcode = vn_nary_op_compute_hash (vno1);
828 slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
835 /* Compute a hashcode for PHI operation VP1 and return it. */
837 static inline hashval_t
838 vn_phi_compute_hash (vn_phi_t vp1)
840 hashval_t result = 0;
/* Seed with the block index: phis in different blocks are never
   equivalent (see vn_phi_eq). */
844 result = vp1->block->index;
846 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
/* VN_TOP args are ignored -- equivalent to everything. */
848 if (phi1op == VN_TOP)
850 result += iterative_hash_expr (phi1op, result);
856 /* Return the computed hashcode for phi operation P1. */
859 vn_phi_hash (const void *p1)
861 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
862 return vp1->hashcode;
865 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
868 vn_phi_eq (const void *p1, const void *p2)
870 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
871 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
873 if (vp1->block == vp2->block)
878 /* Any phi in the same block will have it's arguments in the
879 same edge order, because of how we store phi nodes. */
880 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
882 tree phi2op = VEC_index (tree, vp2->phiargs, i);
/* A VN_TOP on either side matches anything in that position. */
883 if (phi1op == VN_TOP || phi2op == VN_TOP)
885 if (!expressions_equal_p (phi1op, phi2op))
/* Shared scratch vector for vn_phi_lookup. */
893 static VEC(tree, heap) *shared_lookup_phiargs;
895 /* Lookup PHI in the current hash table, and return the resulting
896 value number if it exists in the hash table. Return NULL_TREE if
897 it does not exist in the hash table. */
900 vn_phi_lookup (tree phi)
906 VEC_truncate (tree, shared_lookup_phiargs, 0);
908 /* Canonicalize the SSA_NAME's to their value number. */
909 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
911 tree def = PHI_ARG_DEF (phi, i);
912 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
913 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
915 vp1.phiargs = shared_lookup_phiargs;
916 vp1.block = bb_for_stmt (phi);
917 vp1.hashcode = vn_phi_compute_hash (&vp1);
918 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
/* Fall back to the valid table during optimistic iteration. */
920 if (!slot && current_info == optimistic_info)
921 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
925 return ((vn_phi_t)*slot)->result;
928 /* Insert PHI into the current hash table with a value number of
/* Unlike lookup, builds an owned args vector the table keeps. */
932 vn_phi_insert (tree phi, tree result)
935 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
937 VEC (tree, heap) *args = NULL;
939 /* Canonicalize the SSA_NAME's to their value number. */
940 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
942 tree def = PHI_ARG_DEF (phi, i);
943 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
944 VEC_safe_push (tree, heap, args, def);
947 vp1->block = bb_for_stmt (phi);
948 vp1->result = result;
949 vp1->hashcode = vn_phi_compute_hash (vp1);
951 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
954 /* Because we iterate over phi operations more than once, it's
955 possible the slot might already exist here, hence no assert.*/
960 /* Print set of components in strongly connected component SCC to OUT. */
963 print_scc (FILE *out, VEC (tree, heap) *scc)
968 fprintf (out, "SCC consists of: ");
969 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
971 print_generic_expr (out, var, 0);
977 /* Set the value number of FROM to TO, return true if it has changed
/* Core lattice transition.  NOTE(review): several guard lines are
   missing from this listing (e.g. what happens for abnormal-phi
   names); code kept verbatim. */
981 set_ssa_val_to (tree from, tree to)
986 && TREE_CODE (to) == SSA_NAME
987 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
990 /* The only thing we allow as value numbers are VN_TOP, ssa_names
991 and invariants. So assert that here. */
992 gcc_assert (to != NULL_TREE
994 || TREE_CODE (to) == SSA_NAME
995 || is_gimple_min_invariant (to)));
997 if (dump_file && (dump_flags & TDF_DETAILS))
999 fprintf (dump_file, "Setting value number of ");
1000 print_generic_expr (dump_file, from, 0);
1001 fprintf (dump_file, " to ");
1002 print_generic_expr (dump_file, to, 0);
1003 fprintf (dump_file, "\n");
/* Only count a change when the value genuinely differs -- iteration
   terminates when no SCC member changes. */
1006 currval = SSA_VAL (from);
1008 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
1010 SSA_VAL (from) = to;
1016 /* Set all definitions in STMT to value number to themselves.
1017 Return true if a value number changed. */
/* "Varying" in this lattice is represented by valnum == the name
   itself. */
1020 defs_to_varying (tree stmt)
1022 bool changed = false;
1026 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
1028 tree def = DEF_FROM_PTR (defp);
1030 VN_INFO (def)->use_processed = true;
1031 changed |= set_ssa_val_to (def, def);
/* Forward declaration; defined later in the file. */
1037 try_to_simplify (tree stmt, tree rhs);
1039 /* Visit a copy between LHS and RHS, return true if the value number
1043 visit_copy (tree lhs, tree rhs)
1046 /* Follow chains of copies to their destination. */
/* Loop stops at a self-valued name or a non-SSA value number. */
1047 while (SSA_VAL (rhs) != rhs && TREE_CODE (SSA_VAL (rhs)) == SSA_NAME)
1048 rhs = SSA_VAL (rhs);
1050 /* The copy may have a more interesting constant filled expression
1051 (we don't, since we know our RHS is just an SSA name). */
1052 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
1053 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
1055 return set_ssa_val_to (lhs, rhs);
1058 /* Visit a unary operator RHS, value number it, and return true if the
1059 value number of LHS has changed as a result. */
/* Lookup-or-insert: reuse an existing value number, else LHS becomes
   its own value and the expression is recorded. */
1062 visit_unary_op (tree lhs, tree op)
1064 bool changed = false;
1065 tree result = vn_nary_op_lookup (op);
1069 changed = set_ssa_val_to (lhs, result);
1073 changed = set_ssa_val_to (lhs, lhs);
1074 vn_nary_op_insert (op, lhs);
1080 /* Visit a binary operator RHS, value number it, and return true if the
1081 value number of LHS has changed as a result. */
/* Same lookup-or-insert scheme as visit_unary_op. */
1084 visit_binary_op (tree lhs, tree op)
1086 bool changed = false;
1087 tree result = vn_nary_op_lookup (op);
1091 changed = set_ssa_val_to (lhs, result);
1095 changed = set_ssa_val_to (lhs, lhs);
1096 vn_nary_op_insert (op, lhs);
1102 /* Visit a load from a reference operator RHS, part of STMT, value number it,
1103 and return true if the value number of the LHS has changed as a result. */
/* NOTE(review): several control-flow lines of this function are
   missing from the listing; code kept verbatim. */
1106 visit_reference_op_load (tree lhs, tree op, tree stmt)
1108 bool changed = false;
/* Look the load up under the pre-store memory state (vuses). */
1109 tree result = vn_reference_lookup (op, shared_vuses_from_stmt (stmt));
1111 /* We handle type-punning through unions by value-numbering based
1112 on offset and size of the access. Be prepared to handle a
1113 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
1115 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
1117 /* We will be setting the value number of lhs to the value number
1118 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
1119 So first simplify and lookup this expression to see if it
1120 is already available. */
1121 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
1123 && !is_gimple_min_invariant (val)
1124 && TREE_CODE (val) != SSA_NAME)
1126 tree tem = try_to_simplify (stmt, val);
/* If simplification did not yield a constant or name, look the
   converted expression up in the nary table. */
1131 if (!is_gimple_min_invariant (val)
1132 && TREE_CODE (val) != SSA_NAME)
1133 result = vn_nary_op_lookup (val);
1134 /* If the expression is not yet available, value-number lhs to
1135 a new SSA_NAME we create. */
1136 if (!result && may_insert)
1138 result = make_ssa_name (SSA_NAME_VAR (lhs), NULL_TREE);
1139 /* Initialize value-number information properly. */
1140 VN_INFO_GET (result)->valnum = result;
1141 VN_INFO (result)->expr = val;
1142 VN_INFO (result)->needs_insertion = true;
1143 /* As all "inserted" statements are singleton SCCs, insert
1144 to the valid table. This is strictly needed to
1145 avoid re-generating new value SSA_NAMEs for the same
1146 expression during SCC iteration over and over (the
1147 optimistic table gets cleared after each iteration).
1148 We do not need to insert into the optimistic table, as
1149 lookups there will fall back to the valid table. */
/* Temporarily retarget current_info at the valid table for the
   insertion, then restore it. */
1150 if (current_info == optimistic_info)
1152 current_info = valid_info;
1153 vn_nary_op_insert (val, result);
1154 current_info = optimistic_info;
1157 vn_nary_op_insert (val, result);
1158 if (dump_file && (dump_flags & TDF_DETAILS))
1160 fprintf (dump_file, "Inserting name ");
1161 print_generic_expr (dump_file, result, 0);
1162 fprintf (dump_file, " for expression ");
1163 print_generic_expr (dump_file, val, 0);
1164 fprintf (dump_file, "\n");
/* Hit: LHS takes the found value.  Miss: LHS is its own value and the
   load is recorded under an owned copy of the vuses. */
1171 changed = set_ssa_val_to (lhs, result);
1175 changed = set_ssa_val_to (lhs, lhs);
1176 vn_reference_insert (op, lhs, copy_vuses_from_stmt (stmt));
1183 /* Visit a store to a reference operator LHS, part of STMT, value number it,
1184 and return true if the value number of the LHS has changed as a result. */
/* NOTE(review): listing is missing some lines of this function; code
   kept verbatim. */
1187 visit_reference_op_store (tree lhs, tree op, tree stmt)
1189 bool changed = false;
1191 bool resultsame = false;
1193 /* First we want to lookup using the *vuses* from the store and see
1194 if there the last store to this location with the same address
1197 The vuses represent the memory state before the store. If the
1198 memory state, address, and value of the store is the same as the
1199 last store to this location, then this store will produce the
1200 same memory state as that store.
1202 In this case the vdef versions for this store are value numbered to those
1203 vuse versions, since they represent the same memory state after
1206 Otherwise, the vdefs for the store are used when inserting into
1207 the table, since the store generates a new memory state. */
1209 result = vn_reference_lookup (lhs, shared_vuses_from_stmt (stmt));
/* Compare the previously-stored value with the value being stored,
   both reduced to value numbers. */
1213 if (TREE_CODE (result) == SSA_NAME)
1214 result = SSA_VAL (result);
1215 if (TREE_CODE (op) == SSA_NAME)
1217 resultsame = expressions_equal_p (result, op);
1220 if (!result || !resultsame)
/* New memory state: vdefs become their own values and the store is
   recorded under them. */
1222 VEC(tree, gc) *vdefs = copy_vdefs_from_stmt (stmt);
1226 if (dump_file && (dump_flags & TDF_DETAILS))
1228 fprintf (dump_file, "No store match\n");
1229 fprintf (dump_file, "Value numbering store ");
1230 print_generic_expr (dump_file, lhs, 0);
1231 fprintf (dump_file, " to ");
1232 print_generic_expr (dump_file, op, 0);
1233 fprintf (dump_file, "\n");
1235 /* Have to set value numbers before insert, since insert is
1236 going to valueize the references in-place. */
1237 for (i = 0; VEC_iterate (tree, vdefs, i, vdef); i++)
1239 VN_INFO (vdef)->use_processed = true;
1240 changed |= set_ssa_val_to (vdef, vdef);
1243 /* Do not insert structure copies into the tables. */
1244 if (is_gimple_min_invariant (op)
1245 || is_gimple_reg (op))
1246 vn_reference_insert (lhs, op, vdefs);
1250 /* We had a match, so value number the vdefs to have the value
1251 number of the vuses they came from. */
1252 ssa_op_iter op_iter;
1256 if (dump_file && (dump_flags & TDF_DETAILS))
1257 fprintf (dump_file, "Store matched earlier value,"
1258 "value numbering store vdefs to matching vuses.\n")
1260 FOR_EACH_SSA_VDEF_OPERAND (var, vv, stmt, op_iter)
1262 tree def = DEF_FROM_PTR (var);
1265 /* Uh, if the vuse is a multiuse, we can't really do much
1266 here, sadly, since we don't know which value number of
1267 which vuse to use. */
1268 if (VUSE_VECT_NUM_ELEM (*vv) != 1)
/* Single underlying vuse: the vdef takes its value number. */
1271 use = VUSE_ELEMENT_VAR (*vv, 0);
1273 VN_INFO (def)->use_processed = true;
1274 changed |= set_ssa_val_to (def, SSA_VAL (use));
1281 /* Visit and value number PHI, return true if the value number
1285 visit_phi (tree phi)
1287 bool changed = false;
1289 tree sameval = VN_TOP;
1290 bool allsame = true;
1293 /* TODO: We could check for this in init_sccvn, and replace this
1294 with a gcc_assert. */
/* Abnormal-phi results are always varying (their own value). */
1295 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
1296 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
1298 /* See if all non-TOP arguments have the same value. TOP is
1299 equivalent to everything, so we can ignore it. */
1300 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
1302 tree def = PHI_ARG_DEF (phi, i);
1304 if (TREE_CODE (def) == SSA_NAME)
1305 def = SSA_VAL (def);
/* First non-TOP value seen becomes the candidate. */
1308 if (sameval == VN_TOP)
1314 if (!expressions_equal_p (def, sameval))
1322 /* If all value numbered to the same value, the phi node has that
1326 if (is_gimple_min_invariant (sameval))
1328 VN_INFO (PHI_RESULT (phi))->has_constants = true;
1329 VN_INFO (PHI_RESULT (phi))->expr = sameval;
1333 VN_INFO (PHI_RESULT (phi))->has_constants = false;
1334 VN_INFO (PHI_RESULT (phi))->expr = sameval;
/* An SSA-name candidate is treated as a copy so copy chains are
   followed; otherwise set the value number directly. */
1337 if (TREE_CODE (sameval) == SSA_NAME)
1338 return visit_copy (PHI_RESULT (phi), sameval);
1340 return set_ssa_val_to (PHI_RESULT (phi), sameval);
1343 /* Otherwise, see if it is equivalent to a phi node in this block. */
1344 result = vn_phi_lookup (phi);
1347 if (TREE_CODE (result) == SSA_NAME)
1348 changed = visit_copy (PHI_RESULT (phi), result);
1350 changed = set_ssa_val_to (PHI_RESULT (phi), result);
/* No equivalent phi: record this one and make its result its own
   value number. */
1354 vn_phi_insert (phi, PHI_RESULT (phi));
1355 VN_INFO (PHI_RESULT (phi))->has_constants = false;
1356 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
1357 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
1363 /* Return true if EXPR contains constants. */
1366 expr_has_constants (tree expr)
1368 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
/* Unary case: only one operand to check.  */
1371 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
/* Binary case: constant in either operand counts.  */
1374 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
1375 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
1376 /* Constants inside reference ops are rarely interesting, but
1377 it can take a lot of looking to find them. */
1379 case tcc_declaration:
/* Default: EXPR itself is constant iff it is a gimple invariant.  */
1382 return is_gimple_min_invariant (expr);
1387 /* Replace SSA_NAMES in expr with their value numbers, and return the
1389 This is performed in place. */
1392 valueize_expr (tree expr)
1394 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
/* Unary: valueize the single operand.  VN_TOP is skipped because it
   stands for "not yet known" and must not leak into the IL.  */
1397 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
1398 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
1399 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
/* Binary: valueize both operands, same VN_TOP guard.  */
1402 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
1403 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
1404 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
1405 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
1406 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
1407 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
1415 /* Simplify the binary expression RHS, and return the result if
1419 simplify_binary_expression (tree stmt, tree rhs)
1421 tree result = NULL_TREE;
1422 tree op0 = TREE_OPERAND (rhs, 0);
1423 tree op1 = TREE_OPERAND (rhs, 1);
1425 /* This will not catch every single case we could combine, but will
1426 catch those with constants. The goal here is to simultaneously
1427 combine constants between expressions, but avoid infinite
1428 expansion of expressions during simplification. */
1429 if (TREE_CODE (op0) == SSA_NAME)
/* Prefer the recorded constant-bearing expression; otherwise fall
   back to the value number (but never VN_TOP or a self-reference).  */
1431 if (VN_INFO (op0)->has_constants)
1432 op0 = valueize_expr (VN_INFO (op0)->expr)
1433 else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
1434 op0 = SSA_VAL (op0);
1437 if (TREE_CODE (op1) == SSA_NAME)
1439 if (VN_INFO (op1)->has_constants)
1440 op1 = valueize_expr (VN_INFO (op1)->expr);
1441 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
1442 op1 = SSA_VAL (op1);
1445 /* Avoid folding if nothing changed. */
1446 if (op0 == TREE_OPERAND (rhs, 0)
1447 && op1 == TREE_OPERAND (rhs, 1))
/* Defer overflow warnings: fold may be speculative here, and we only
   want diagnostics if the folded result is actually used.  */
1450 fold_defer_overflow_warnings ();
1452 result = fold_binary (TREE_CODE (rhs), TREE_TYPE (rhs), op0, op1);
1454 fold_undefer_overflow_warnings (result && valid_gimple_expression_p (result),
1457 /* Make sure result is not a complex expression consisting
1458 of operators of operators (IE (a + b) + (a + c))
1459 Otherwise, we will end up with unbounded expressions if
1460 fold does anything at all. */
1461 if (result && valid_gimple_expression_p (result))
1467 /* Simplify the unary expression RHS, and return the result if
1471 simplify_unary_expression (tree rhs)
1473 tree result = NULL_TREE;
1474 tree op0 = TREE_OPERAND (rhs, 0);
/* Only SSA-name operands carry value-number info we can exploit.  */
1476 if (TREE_CODE (op0) != SSA_NAME)
1479 if (VN_INFO (op0)->has_constants)
1480 op0 = valueize_expr (VN_INFO (op0)->expr);
1481 else if (TREE_CODE (rhs) == NOP_EXPR
1482 || TREE_CODE (rhs) == CONVERT_EXPR
1483 || TREE_CODE (rhs) == REALPART_EXPR
1484 || TREE_CODE (rhs) == IMAGPART_EXPR
1485 || TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
1487 /* We want to do tree-combining on conversion-like expressions.
1488 Make sure we feed only SSA_NAMEs or constants to fold though. */
1489 tree tem = valueize_expr (VN_INFO (op0)->expr);
1490 if (UNARY_CLASS_P (tem)
1491 || BINARY_CLASS_P (tem)
1492 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
1493 || TREE_CODE (tem) == SSA_NAME
1494 || is_gimple_min_invariant (tem))
1498 /* Avoid folding if nothing changed, but remember the expression. */
1499 if (op0 == TREE_OPERAND (rhs, 0))
1502 result = fold_unary (TREE_CODE (rhs), TREE_TYPE (rhs), op0);
/* Drop useless conversions, then accept the result only if it is a
   valid gimple expression.  */
1505 STRIP_USELESS_TYPE_CONVERSION (result);
1506 if (valid_gimple_expression_p (result))
1513 /* Try to simplify RHS using equivalences and constant folding. */
1516 try_to_simplify (tree stmt, tree rhs)
1518 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
1519 in this case, there is no point in doing extra work. */
1520 if (TREE_CODE (rhs) == SSA_NAME)
/* Dispatch on the tree-code class of RHS.  */
1524 switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
1526 /* For references, see if we find a result for the lookup,
1527 and use it if we do. */
1528 case tcc_declaration:
1529 /* Pull out any truly constant values. */
1530 if (TREE_READONLY (rhs)
1531 && TREE_STATIC (rhs)
1532 && DECL_INITIAL (rhs)
1533 && valid_gimple_expression_p (DECL_INITIAL (rhs)))
1534 return DECL_INITIAL (rhs);
1538 /* Do not do full-blown reference lookup here.
1539 ??? But like for tcc_declaration, we should simplify
1540 from constant initializers. */
1542 /* Fallthrough for some codes that can operate on registers. */
1543 if (!(TREE_CODE (rhs) == REALPART_EXPR
1544 || TREE_CODE (rhs) == IMAGPART_EXPR
1545 || TREE_CODE (rhs) == VIEW_CONVERT_EXPR))
1547 /* We could do a little more with unary ops, if they expand
1548 into binary ops, but it's debatable whether it is worth it. */
1550 return simplify_unary_expression (rhs);
/* Comparisons are handled like binary expressions.  */
1552 case tcc_comparison:
1554 return simplify_binary_expression (stmt, rhs);
1563 /* Visit and value number USE, return true if the value number
1567 visit_use (tree use)
1569 bool changed = false;
1570 tree stmt = SSA_NAME_DEF_STMT (use);
/* Mark the name processed up-front so cycles terminate.  */
1573 VN_INFO (use)->use_processed = true;
1575 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
1576 if (dump_file && (dump_flags & TDF_DETAILS)
1577 && !IS_EMPTY_STMT (stmt))
1579 fprintf (dump_file, "Value numbering ");
1580 print_generic_expr (dump_file, use, 0);
1581 fprintf (dump_file, " stmt = ");
1582 print_generic_stmt (dump_file, stmt, 0);
1585 /* RETURN_EXPR may have an embedded MODIFY_STMT. */
1586 if (TREE_CODE (stmt) == RETURN_EXPR
1587 && TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT)
1588 stmt = TREE_OPERAND (stmt, 0);
1590 ann = stmt_ann (stmt);
1592 /* Handle uninitialized uses. */
1593 if (IS_EMPTY_STMT (stmt))
/* An uninitialized name gets itself as value number.  */
1595 changed = set_ssa_val_to (use, use);
1599 if (TREE_CODE (stmt) == PHI_NODE)
1601 changed = visit_phi (stmt);
/* Volatile or throwing statements cannot be value numbered; force
   all their defs to VARYING.  */
1603 else if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
1604 || (ann && ann->has_volatile_ops)
1605 || tree_could_throw_p (stmt))
1607 changed = defs_to_varying (stmt);
1611 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
1612 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
1615 STRIP_USELESS_TYPE_CONVERSION (rhs);
1617 /* Shortcut for copies. Simplifying copies is pointless,
1618 since we copy the expression and value they represent. */
1619 if (TREE_CODE (rhs) == SSA_NAME && TREE_CODE (lhs) == SSA_NAME)
1621 changed = visit_copy (lhs, rhs);
1624 simplified = try_to_simplify (stmt, rhs);
1625 if (simplified && simplified != rhs)
1627 if (dump_file && (dump_flags & TDF_DETAILS))
1629 fprintf (dump_file, "RHS ");
1630 print_generic_expr (dump_file, rhs, 0);
1631 fprintf (dump_file, " simplified to ");
1632 print_generic_expr (dump_file, simplified, 0);
1633 if (TREE_CODE (lhs) == SSA_NAME)
1634 fprintf (dump_file, " has constants %d\n",
1635 VN_INFO (lhs)->has_constants);
1637 fprintf (dump_file, "\n");
1641 /* Setting value numbers to constants will occasionally
1642 screw up phi congruence because constants are not
1643 uniquely associated with a single ssa name that can be
1645 if (simplified && is_gimple_min_invariant (simplified)
1646 && TREE_CODE (lhs) == SSA_NAME
1647 && simplified != rhs)
1649 VN_INFO (lhs)->expr = simplified;
1650 VN_INFO (lhs)->has_constants = true;
1651 changed = set_ssa_val_to (lhs, simplified);
/* Simplified to another SSA name: treat as a copy.  */
1654 else if (simplified && TREE_CODE (simplified) == SSA_NAME
1655 && TREE_CODE (lhs) == SSA_NAME)
1657 changed = visit_copy (lhs, simplified);
1660 else if (simplified)
1662 if (TREE_CODE (lhs) == SSA_NAME)
1664 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
1665 /* We have to unshare the expression or else
1666 valuizing may change the IL stream. */
1667 VN_INFO (lhs)->expr = unshare_expr (simplified);
1671 else if (expr_has_constants (rhs) && TREE_CODE (lhs) == SSA_NAME)
1673 VN_INFO (lhs)->has_constants = true;
1674 VN_INFO (lhs)->expr = unshare_expr (rhs);
1676 else if (TREE_CODE (lhs) == SSA_NAME)
1678 /* We reset expr and constantness here because we may
1679 have been value numbering optimistically, and
1680 iterating. They may become non-constant in this case,
1681 even if they were optimistically constant. */
1683 VN_INFO (lhs)->has_constants = false;
1684 VN_INFO (lhs)->expr = lhs;
1687 if (TREE_CODE (lhs) == SSA_NAME
1688 /* We can substitute SSA_NAMEs that are live over
1689 abnormal edges with their constant value. */
1690 && !is_gimple_min_invariant (rhs)
1691 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
1692 changed = defs_to_varying (stmt);
/* Stores to memory go through the reference-store path.  */
1693 else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
1695 changed = visit_reference_op_store (lhs, rhs, stmt);
1697 else if (TREE_CODE (lhs) == SSA_NAME)
1699 if (is_gimple_min_invariant (rhs))
1701 VN_INFO (lhs)->has_constants = true;
1702 VN_INFO (lhs)->expr = rhs;
1703 changed = set_ssa_val_to (lhs, rhs);
/* Dispatch by tree-code class of the RHS.  */
1707 switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
1710 changed = visit_unary_op (lhs, rhs);
1713 changed = visit_binary_op (lhs, rhs);
1715 /* If tcc_vl_expr ever encompasses more than
1716 CALL_EXPR, this will need to be changed. */
/* Only pure/const calls can be value numbered as loads.  */
1718 if (call_expr_flags (rhs) & (ECF_PURE | ECF_CONST))
1719 changed = visit_reference_op_load (lhs, rhs, stmt);
1721 changed = defs_to_varying (stmt);
1723 case tcc_declaration:
1725 changed = visit_reference_op_load (lhs, rhs, stmt);
1727 case tcc_expression:
1728 if (TREE_CODE (rhs) == ADDR_EXPR)
1730 changed = visit_unary_op (lhs, rhs);
1735 changed = defs_to_varying (stmt);
/* Anything not handled above is VARYING.  */
1741 changed = defs_to_varying (stmt);
1748 /* Compare two operands by reverse postorder index */
1751 compare_ops (const void *pa, const void *pb)
1753 const tree opa = *((const tree *)pa);
1754 const tree opb = *((const tree *)pb);
1755 tree opstmta = SSA_NAME_DEF_STMT (opa);
1756 tree opstmtb = SSA_NAME_DEF_STMT (opb);
/* Names with empty (nop) def statements sort first.  */
1760 if (IS_EMPTY_STMT (opstmta) && IS_EMPTY_STMT (opstmtb))
1762 else if (IS_EMPTY_STMT (opstmta))
1764 else if (IS_EMPTY_STMT (opstmtb))
1767 bba = bb_for_stmt (opstmta);
1768 bbb = bb_for_stmt (opstmtb);
/* Within the same block, phis sort before ordinary statements,
   which are ordered by their uid annotation.  */
1779 if (TREE_CODE (opstmta) == PHI_NODE && TREE_CODE (opstmtb) == PHI_NODE)
1781 else if (TREE_CODE (opstmta) == PHI_NODE)
1783 else if (TREE_CODE (opstmtb) == PHI_NODE)
1785 return stmt_ann (opstmta)->uid - stmt_ann (opstmtb)->uid;
/* Different blocks: order by reverse-postorder number.  */
1787 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
1790 /* Sort an array containing members of a strongly connected component
1791 SCC so that the members are ordered by RPO number.
1792 This means that when the sort is complete, iterating through the
1793 array will give you the members in RPO order. */
1796 sort_scc (VEC (tree, heap) *scc)
/* qsort the underlying array using compare_ops (RPO order).  */
1798 qsort (VEC_address (tree, scc),
1799 VEC_length (tree, scc),
1804 /* Process a strongly connected component in the SSA graph. */
1807 process_scc (VEC (tree, heap) *scc)
1809 /* If the SCC has a single member, just visit it. */
1811 if (VEC_length (tree, scc) == 1)
1813 tree use = VEC_index (tree, scc, 0);
1814 if (!VN_INFO (use)->use_processed)
1821 unsigned int iterations = 0;
1822 bool changed = true;
1824 /* Iterate over the SCC with the optimistic table until it stops
1826 current_info = optimistic_info;
/* Clear all optimistic tables between iterations; only the
   SSA name -> value mapping persists (see file header, option 2).  */
1831 htab_empty (optimistic_info->nary);
1832 htab_empty (optimistic_info->phis);
1833 htab_empty (optimistic_info->references);
1834 obstack_free (&optimistic_info->nary_obstack, NULL);
1835 gcc_obstack_init (&optimistic_info->nary_obstack);
1836 empty_alloc_pool (optimistic_info->phis_pool);
1837 empty_alloc_pool (optimistic_info->references_pool);
1838 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1839 changed |= visit_use (var);
1842 if (dump_file && (dump_flags & TDF_STATS))
1843 fprintf (dump_file, "Processing SCC required %d iterations\n",
1846 /* Finally, visit the SCC once using the valid table. */
1847 current_info = valid_info;
1848 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1853 /* Depth first search on NAME to discover and process SCC's in the SSA
1855 Execution of this algorithm relies on the fact that the SCC's are
1856 popped off the stack in topological order.
1857 Returns true if successful, false if we stopped processing SCC's due
1858 to ressource constraints. */
/* Tarjan-style SCC discovery: assign dfs number / low-link, push NAME
   on the SCC stack, then recurse into the uses of its def statement.  */
1868 VN_INFO (name)->dfsnum = next_dfs_num++;
1869 VN_INFO (name)->visited = true;
1870 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
1872 VEC_safe_push (tree, heap, sccstack, name);
1873 VN_INFO (name)->on_sccstack = true;
1874 defstmt = SSA_NAME_DEF_STMT (name);
1876 /* Recursively DFS on our operands, looking for SCC's. */
1877 if (!IS_EMPTY_STMT (defstmt))
1879 FOR_EACH_PHI_OR_STMT_USE (usep, SSA_NAME_DEF_STMT (name), iter,
1882 tree use = USE_FROM_PTR (usep);
1884 /* Since we handle phi nodes, we will sometimes get
1885 invariants in the use expression. */
1886 if (TREE_CODE (use) != SSA_NAME)
1889 if (! (VN_INFO (use)->visited))
/* After recursing, propagate the child's low-link upward.  */
1893 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
1894 VN_INFO (use)->low);
/* Back edge to a node still on the stack: update low-link.  */
1896 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
1897 && VN_INFO (use)->on_sccstack)
1899 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
1900 VN_INFO (name)->low);
1905 /* See if we found an SCC. */
1906 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
1908 VEC (tree, heap) *scc = NULL;
1911 /* Found an SCC, pop the components off the SCC stack and
1915 x = VEC_pop (tree, sccstack);
1917 VN_INFO (x)->on_sccstack = false;
1918 VEC_safe_push (tree, heap, scc, x);
1919 } while (x != name);
1921 /* Bail out of SCCVN in case a SCC turns out to be incredibly large. */
1922 if (VEC_length (tree, scc)
1923 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
1926 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
1927 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
1928 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
/* Multi-member SCCs must be sorted into RPO order before
   processing (see sort_scc).  */
1932 if (VEC_length (tree, scc) > 1)
1935 if (dump_file && (dump_flags & TDF_DETAILS))
1936 print_scc (dump_file, scc);
1940 VEC_free (tree, heap, scc);
1946 /* Allocate a value number table. */
1949 allocate_vn_table (vn_tables_t table)
/* Hash tables for phis, n-ary operations, and memory references.  */
1951 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
1952 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
1953 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
/* Backing storage: an obstack for n-ary entries, alloc pools for
   phi and reference entries (mirrors free_vn_table).  */
1956 gcc_obstack_init (&table->nary_obstack);
1957 table->phis_pool = create_alloc_pool ("VN phis",
1958 sizeof (struct vn_phi_s),
1960 table->references_pool = create_alloc_pool ("VN references",
1961 sizeof (struct vn_reference_s),
1965 /* Free a value number table. */
1968 free_vn_table (vn_tables_t table)
/* Release everything allocate_vn_table created, in kind.  */
1970 htab_delete (table->phis);
1971 htab_delete (table->nary);
1972 htab_delete (table->references);
1973 obstack_free (&table->nary_obstack, NULL);
1974 free_alloc_pool (table->phis_pool);
1975 free_alloc_pool (table->references_pool);
/* Set up all SCCVN global state: per-SSA-name aux info, RPO numbers,
   statement uids, VN_TOP, and the valid/optimistic tables.  */
1983 int *rpo_numbers_temp;
1987 calculate_dominance_info (CDI_DOMINATORS);
1991 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
1992 /* VEC_alloc doesn't actually grow it to the right size, it just
1993 preallocates the space to do so. */
1994 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
1995 gcc_obstack_init (&vn_ssa_aux_obstack);
1997 shared_lookup_phiargs = NULL;
1998 shared_lookup_vops = NULL;
1999 shared_lookup_references = NULL;
2000 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
2001 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
2002 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
2004 /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
2005 the i'th block in RPO order is bb. We want to map bb's to RPO
2006 numbers, so we need to rearrange this array. */
2007 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
2008 rpo_numbers[rpo_numbers_temp[j]] = j;
2010 XDELETE (rpo_numbers_temp);
/* VN_TOP is a unique dummy variable representing "unknown" values.  */
2012 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
2014 /* Create the VN_INFO structures, and initialize value numbers to
2016 for (i = 0; i < num_ssa_names; i++)
2018 tree name = ssa_name (i);
2021 VN_INFO_GET (name)->valnum = VN_TOP;
2022 VN_INFO (name)->expr = name;
/* Assign increasing uids to statements; compare_ops relies on
   these for intra-block ordering.  */
2028 block_stmt_iterator bsi;
2029 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2031 tree stmt = bsi_stmt (bsi);
2032 stmt_ann (stmt)->uid = id++;
2036 /* Create the valid and optimistic value numbering tables. */
2037 valid_info = XCNEW (struct vn_tables_s);
2038 allocate_vn_table (valid_info);
2039 optimistic_info = XCNEW (struct vn_tables_s);
2040 allocate_vn_table (optimistic_info);
/* Allocate a fresh table for PRE and make it the current one.  */
2045 switch_to_PRE_table (void)
2047 pre_info = XCNEW (struct vn_tables_s);
2048 allocate_vn_table (pre_info);
2049 current_info = pre_info;
/* Tear down all SCCVN global state created by init_scc_vn (and
   switch_to_PRE_table, if it was used).  */
2057 VEC_free (tree, heap, shared_lookup_phiargs);
2058 VEC_free (tree, gc, shared_lookup_vops);
2059 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
2060 XDELETEVEC (rpo_numbers);
2062 for (i = 0; i < num_ssa_names; i++)
2064 tree name = ssa_name (i);
/* Clear stale VALUE_HANDLE annotations left on SSA names.  */
2066 && SSA_NAME_VALUE (name)
2067 && TREE_CODE (SSA_NAME_VALUE (name)) == VALUE_HANDLE)
2068 SSA_NAME_VALUE (name) = NULL;
/* Names SCCVN itself inserted are released here.  */
2070 && VN_INFO (name)->needs_insertion)
2071 release_ssa_name (name);
2073 obstack_free (&vn_ssa_aux_obstack, NULL);
2074 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
2076 VEC_free (tree, heap, sccstack);
2077 free_vn_table (valid_info);
2078 XDELETE (valid_info);
2079 free_vn_table (optimistic_info);
2080 XDELETE (optimistic_info);
/* pre_info only exists if switch_to_PRE_table was called.  */
2083 free_vn_table (pre_info);
2088 /* Do SCCVN. Returns true if it finished, false if we bailed out
2089 due to ressource constraints. */
2092 run_scc_vn (bool may_insert_arg)
2097 may_insert = may_insert_arg;
2100 current_info = valid_info;
2102 for (param = DECL_ARGUMENTS (current_function_decl);
2104 param = TREE_CHAIN (param))
2106 if (gimple_default_def (cfun, param) != NULL)
2108 tree def = gimple_default_def (cfun, param);
2109 SSA_VAL (def) = def;
2113 for (i = 1; i < num_ssa_names; ++i)
2115 tree name = ssa_name (i);
2117 && VN_INFO (name)->visited == false
2118 && !has_zero_uses (name))
2127 if (dump_file && (dump_flags & TDF_DETAILS))
2129 fprintf (dump_file, "Value numbers:\n");
2130 for (i = 0; i < num_ssa_names; i++)
2132 tree name = ssa_name (i);
2133 if (name && VN_INFO (name)->visited
2134 && (SSA_VAL (name) != name
2135 || is_gimple_min_invariant (VN_INFO (name)->expr)))
2137 print_generic_expr (dump_file, name, 0);
2138 fprintf (dump_file, " = ");
2139 if (is_gimple_min_invariant (VN_INFO (name)->expr))
2140 print_generic_expr (dump_file, VN_INFO (name)->expr, 0);
2142 print_generic_expr (dump_file, SSA_VAL (name), 0);
2143 fprintf (dump_file, "\n");