1 /* SCC value numbering for trees
2 Copyright (C) 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dan@dberlin.org>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "basic-block.h"
29 #include "diagnostic.h"
30 #include "tree-inline.h"
31 #include "tree-flow.h"
32 #include "tree-gimple.h"
33 #include "tree-dump.h"
37 #include "tree-iterator.h"
39 #include "alloc-pool.h"
40 #include "tree-pass.h"
43 #include "langhooks.h"
46 #include "tree-ssa-propagate.h"
47 #include "tree-ssa-sccvn.h"
49 /* This algorithm is based on the SCC algorithm presented by Keith
50 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
51 (http://citeseer.ist.psu.edu/41805.html). In
52 straight line code, it is equivalent to a regular hash based value
53 numbering that is performed in reverse postorder.
55 For code with cycles, there are two alternatives, both of which
56 require keeping the hashtables separate from the actual list of
57 value numbers for SSA names.
59 1. Iterate value numbering in an RPO walk of the blocks, removing
60 all the entries from the hashtable after each iteration (but
61 keeping the SSA name->value number mapping between iterations).
62 Iterate until it does not change.
64 2. Perform value numbering as part of an SCC walk on the SSA graph,
65 iterating only the cycles in the SSA graph until they do not change
66 (using a separate, optimistic hashtable for value numbering the SCC
67 operands).
69 The second is not just faster in practice (because most SSA graph
70 cycles do not involve all the variables in the graph), it also has
73 One of these nice properties is that when we pop an SCC off the
74 stack, we are guaranteed to have processed all the operands coming from
75 *outside of that SCC*, so we do not need to do anything special to
76 ensure they have value numbers.
78 Another nice property is that the SCC walk is done as part of a DFS
79 of the SSA graph, which makes it easy to perform combining and
80 simplifying operations at the same time.
82 The code below is deliberately written in a way that makes it easy
83 to separate the SCC walk from the other work it does.
85 In order to propagate constants through the code, we track which
86 expressions contain constants, and use those while folding. In
87 theory, we could also track expressions whose value numbers are
88 replaced, in case we end up folding based on expression
89 identities.
91 In order to value number memory, we assign value numbers to vuses.
92 This enables us to note that, for example, stores to the same
93 address of the same value from the same starting memory states are
94 equivalent.
97 1. We can iterate only the changing portions of the SCC's, but
98 I have not seen an SCC big enough for this to be a win.
99 2. If you differentiate between phi nodes for loops and phi nodes
100 for if-then-else, you can properly consider phi nodes in different
101 blocks for equivalence.
102 3. We could value number vuses in more cases, particularly, whole
103 structure copies.
106 /* The set of hashtables and alloc_pool's for their items. */
108 typedef struct vn_tables_s
113 struct obstack nary_obstack;
114 alloc_pool phis_pool;
115 alloc_pool references_pool;
118 static htab_t constant_to_value_id;
119 static bitmap constant_value_ids;
122 /* Valid hashtables storing information we have proven to be
125 static vn_tables_t valid_info;
127 /* Optimistic hashtables storing information we are making assumptions about
128 during iterations. */
130 static vn_tables_t optimistic_info;
132 /* PRE hashtables storing information about mapping from expressions to
135 static vn_tables_t pre_info;
137 /* Pointer to the set of hashtables that is currently being used.
138 Should always point to either the optimistic_info, or the
141 static vn_tables_t current_info;
144 /* Reverse post order index for each basic block. */
146 static int *rpo_numbers;
148 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
150 /* This represents the top of the VN lattice, which is the universal
155 /* Unique counter for our value ids. */
157 static unsigned int next_value_id;
159 /* Next DFS number and the stack for strongly connected component
162 static unsigned int next_dfs_num;
163 static VEC (tree, heap) *sccstack;
165 static bool may_insert;
168 DEF_VEC_P(vn_ssa_aux_t);
169 DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);
171 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
172 are allocated on an obstack for locality reasons, and to free them
173 without looping over the VEC. */
175 static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
176 static struct obstack vn_ssa_aux_obstack;
178 /* Return the value numbering information for a given SSA name. */
183 vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
184 SSA_NAME_VERSION (name));
189 /* Set the value numbering info for a given SSA name to a given
193 VN_INFO_SET (tree name, vn_ssa_aux_t value)
195 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
196 SSA_NAME_VERSION (name), value);
199 /* Initialize the value numbering info for a given SSA name.
200 This should be called just once for every SSA name. */
203 VN_INFO_GET (tree name)
205 vn_ssa_aux_t newinfo;
207 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
208 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
209 if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
210 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
211 SSA_NAME_VERSION (name) + 1);
212 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
213 SSA_NAME_VERSION (name), newinfo);
218 /* Free a phi operation structure VP. */
223 vn_phi_t phi = (vn_phi_t) vp;
224 VEC_free (tree, heap, phi->phiargs);
227 /* Free a reference operation structure VP. */
230 free_reference (void *vp)
232 vn_reference_t vr = (vn_reference_t) vp;
233 VEC_free (vn_reference_op_s, heap, vr->operands);
236 /* Hash table equality function for vn_constant_t. */
239 vn_constant_eq (const void *p1, const void *p2)
241 const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
242 const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;
244 return expressions_equal_p (vc1->constant, vc2->constant);
247 /* Hash table hash function for vn_constant_t. */
250 vn_constant_hash (const void *p1)
252 const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
253 return vc1->hashcode;
256 /* Lookup a value id for CONSTANT, and if it does not exist, create a
257 new one and return it. If it does exist, return it. */
260 get_or_alloc_constant_value_id (tree constant)
263 vn_constant_t vc = XNEW (struct vn_constant_s);
265 vc->hashcode = iterative_hash_expr (constant, 0);
266 vc->constant = constant;
267 slot = htab_find_slot_with_hash (constant_to_value_id, vc,
268 vc->hashcode, INSERT);
272 return ((vn_constant_t)*slot)->value_id;
274 vc->value_id = get_next_value_id ();
276 bitmap_set_bit (constant_value_ids, vc->value_id);
280 /* Return true if V is a value id for a constant. */
283 value_id_constant_p (unsigned int v)
285 return bitmap_bit_p (constant_value_ids, v);
288 /* Compare two reference operands P1 and P2 for equality. Return true if
289 they are equal, and false otherwise. */
292 vn_reference_op_eq (const void *p1, const void *p2)
294 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
295 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
296 return vro1->opcode == vro2->opcode
297 && vro1->type == vro2->type
298 && expressions_equal_p (vro1->op0, vro2->op0)
299 && expressions_equal_p (vro1->op1, vro2->op1)
300 && expressions_equal_p (vro1->op2, vro2->op2);
303 /* Compute the hash for a reference operand VRO1. */
306 vn_reference_op_compute_hash (const vn_reference_op_t vro1)
308 return iterative_hash_expr (vro1->op0, vro1->opcode)
309 + iterative_hash_expr (vro1->op1, vro1->opcode);
312 /* Return the hashcode for a given reference operation P1. */
315 vn_reference_hash (const void *p1)
317 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
318 return vr1->hashcode;
321 /* Compute a hash for the reference operation VR1 and return it. */
324 vn_reference_compute_hash (const vn_reference_t vr1)
326 hashval_t result = 0;
329 vn_reference_op_t vro;
331 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
332 result += iterative_hash_expr (v, 0);
333 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
334 result += vn_reference_op_compute_hash (vro);
339 /* Return true if reference operations P1 and P2 are equivalent. This
340 means they have the same set of operands and vuses. */
343 vn_reference_eq (const void *p1, const void *p2)
347 vn_reference_op_t vro;
349 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
350 const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
352 if (vr1->vuses == vr2->vuses
353 && vr1->operands == vr2->operands)
356 /* Impossible for them to be equivalent if they have different
358 if (VEC_length (tree, vr1->vuses) != VEC_length (tree, vr2->vuses))
361 /* We require that address operands be canonicalized in a way that
362 two memory references will have the same operands if they are
364 if (VEC_length (vn_reference_op_s, vr1->operands)
365 != VEC_length (vn_reference_op_s, vr2->operands))
368 /* The memory state is more often different than the address of the
369 store/load, so check it first. */
370 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
372 if (VEC_index (tree, vr2->vuses, i) != v)
376 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
378 if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
385 /* Place the vuses from STMT into *result. */
388 vuses_to_vec (tree stmt, VEC (tree, gc) **result)
396 VEC_reserve_exact (tree, gc, *result,
397 num_ssa_operands (stmt, SSA_OP_VIRTUAL_USES));
399 FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VIRTUAL_USES)
400 VEC_quick_push (tree, *result, vuse);
404 /* Copy the VUSE names in STMT into a vector, and return
408 copy_vuses_from_stmt (tree stmt)
410 VEC (tree, gc) *vuses = NULL;
412 vuses_to_vec (stmt, &vuses);
417 /* Place the vdefs from STMT into *result. */
420 vdefs_to_vec (tree stmt, VEC (tree, gc) **result)
428 *result = VEC_alloc (tree, gc, num_ssa_operands (stmt, SSA_OP_VIRTUAL_DEFS));
430 FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, iter, SSA_OP_VIRTUAL_DEFS)
431 VEC_quick_push (tree, *result, vdef);
434 /* Copy the names of vdef results in STMT into a vector, and return
437 static VEC (tree, gc) *
438 copy_vdefs_from_stmt (tree stmt)
440 VEC (tree, gc) *vdefs = NULL;
442 vdefs_to_vec (stmt, &vdefs);
447 /* Place for shared_v{uses/defs}_from_stmt to shove vuses/vdefs. */
448 static VEC (tree, gc) *shared_lookup_vops;
450 /* Copy the virtual uses from STMT into SHARED_LOOKUP_VOPS.
451 This function will overwrite the current SHARED_LOOKUP_VOPS
455 shared_vuses_from_stmt (tree stmt)
457 VEC_truncate (tree, shared_lookup_vops, 0);
458 vuses_to_vec (stmt, &shared_lookup_vops);
460 return shared_lookup_vops;
463 /* Copy the operations present in load/store/call REF into RESULT, a vector of
464 vn_reference_op_s's. */
467 copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
469 /* Calls are different from all other reference operations. */
470 if (TREE_CODE (ref) == CALL_EXPR)
472 vn_reference_op_s temp;
474 call_expr_arg_iterator iter;
477 /* Copy the call_expr opcode, type, function being called, and
479 memset (&temp, 0, sizeof (temp));
480 temp.type = TREE_TYPE (ref);
481 temp.opcode = CALL_EXPR;
482 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
484 /* We make no attempt to simplify the called function because
485 the typical &FUNCTION_DECL form is also used in function pointer
486 cases that become constant. If we simplify the original to
487 FUNCTION_DECL but not the function pointer case (which can
488 happen because we have no fold functions that operate on
489 vn_reference_t), we will claim they are not equivalent.
491 An example of this behavior can be see if CALL_EXPR_FN below is
492 replaced with get_callee_fndecl and gcc.dg/tree-ssa/ssa-pre-13.c
494 callfn = CALL_EXPR_FN (ref);
495 temp.type = TREE_TYPE (callfn);
496 temp.opcode = TREE_CODE (callfn);
498 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
500 FOR_EACH_CALL_EXPR_ARG (callarg, iter, ref)
502 memset (&temp, 0, sizeof (temp));
503 temp.type = TREE_TYPE (callarg);
504 temp.opcode = TREE_CODE (callarg);
506 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
511 if (TREE_CODE (ref) == TARGET_MEM_REF)
513 vn_reference_op_s temp;
515 memset (&temp, 0, sizeof (temp));
516 /* We do not care for spurious type qualifications. */
517 temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
518 temp.opcode = TREE_CODE (ref);
519 temp.op0 = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
520 temp.op1 = TMR_INDEX (ref);
521 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
523 memset (&temp, 0, sizeof (temp));
524 temp.type = NULL_TREE;
525 temp.opcode = TREE_CODE (ref);
526 temp.op0 = TMR_STEP (ref);
527 temp.op1 = TMR_OFFSET (ref);
528 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
532 /* For non-calls, store the information that makes up the address. */
536 vn_reference_op_s temp;
538 memset (&temp, 0, sizeof (temp));
539 /* We do not care for spurious type qualifications. */
540 temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
541 temp.opcode = TREE_CODE (ref);
545 case ALIGN_INDIRECT_REF:
546 case MISALIGNED_INDIRECT_REF:
548 /* The only operand is the address, which gets its own
549 vn_reference_op_s structure. */
552 /* Record bits and position. */
553 temp.op0 = TREE_OPERAND (ref, 1);
554 temp.op1 = TREE_OPERAND (ref, 2);
557 /* The field decl is enough to unambiguously specify the field,
558 a matching type is not necessary and a mismatching type
559 is always a spurious difference. */
560 temp.type = NULL_TREE;
562 /* If this is a reference to a union member, record the union
563 member size as operand. Do so only if we are doing
564 expression insertion (during FRE), as PRE currently gets
565 confused with this. */
567 && TREE_CODE (DECL_CONTEXT (TREE_OPERAND (ref, 1))) == UNION_TYPE
568 && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (ref, 1)))
569 && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1))))
570 temp.op0 = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)));
573 /* Record field as operand. */
574 temp.op0 = TREE_OPERAND (ref, 1);
575 temp.op1 = TREE_OPERAND (ref, 2);
577 case ARRAY_RANGE_REF:
579 /* Record index as operand. */
580 temp.op0 = TREE_OPERAND (ref, 1);
581 temp.op1 = TREE_OPERAND (ref, 2);
582 temp.op2 = TREE_OPERAND (ref, 3);
597 /* These are only interesting for their operands, their
598 existence, and their type. They will never be the last
599 ref in the chain of references (IE they require an
600 operand), so we don't have to put anything
601 for op* as it will be handled by the iteration */
604 case VIEW_CONVERT_EXPR:
611 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
613 if (REFERENCE_CLASS_P (ref) || TREE_CODE (ref) == ADDR_EXPR)
614 ref = TREE_OPERAND (ref, 0);
620 /* Create a vector of vn_reference_op_s structures from REF, a
621 REFERENCE_CLASS_P tree. The vector is not shared. */
623 static VEC(vn_reference_op_s, heap) *
624 create_reference_ops_from_ref (tree ref)
626 VEC (vn_reference_op_s, heap) *result = NULL;
628 copy_reference_ops_from_ref (ref, &result);
632 static VEC(vn_reference_op_s, heap) *shared_lookup_references;
634 /* Create a vector of vn_reference_op_s structures from REF, a
635 REFERENCE_CLASS_P tree. The vector is shared among all callers of
638 static VEC(vn_reference_op_s, heap) *
639 shared_reference_ops_from_ref (tree ref)
643 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
644 copy_reference_ops_from_ref (ref, &shared_lookup_references);
645 return shared_lookup_references;
649 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
650 structures into their value numbers. This is done in-place, and
651 the vector passed in is returned. */
653 static VEC (vn_reference_op_s, heap) *
654 valueize_refs (VEC (vn_reference_op_s, heap) *orig)
656 vn_reference_op_t vro;
659 for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
661 if (vro->opcode == SSA_NAME
662 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
664 vro->op0 = SSA_VAL (vro->op0);
665 /* If it transforms from an SSA_NAME to a constant, update
667 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
668 vro->opcode = TREE_CODE (vro->op0);
670 /* TODO: Do we want to valueize op2 and op1 of
671 ARRAY_REF/COMPONENT_REF for Ada */
678 /* Transform any SSA_NAME's in ORIG, a vector of vuse trees, into
679 their value numbers. This is done in-place, and the vector passed
682 static VEC (tree, gc) *
683 valueize_vuses (VEC (tree, gc) *orig)
685 bool made_replacement = false;
689 for (i = 0; VEC_iterate (tree, orig, i, vuse); i++)
691 if (vuse != SSA_VAL (vuse))
693 made_replacement = true;
694 VEC_replace (tree, orig, i, SSA_VAL (vuse));
698 if (made_replacement && VEC_length (tree, orig) > 1)
704 /* Return the single reference statement defining all virtual uses
705 in VUSES or NULL_TREE, if there are multiple defining statements.
706 Take into account only definitions that alias REF if following
710 get_def_ref_stmt_vuses (tree ref, VEC (tree, gc) *vuses)
715 gcc_assert (VEC_length (tree, vuses) >= 1);
717 def_stmt = SSA_NAME_DEF_STMT (VEC_index (tree, vuses, 0));
718 if (TREE_CODE (def_stmt) == PHI_NODE)
720 /* We can only handle lookups over PHI nodes for a single
722 if (VEC_length (tree, vuses) == 1)
724 def_stmt = get_single_def_stmt_from_phi (ref, def_stmt);
731 /* Verify each VUSE reaches the same defining stmt. */
732 for (i = 1; VEC_iterate (tree, vuses, i, vuse); ++i)
734 tree tmp = SSA_NAME_DEF_STMT (vuse);
739 /* Now see if the definition aliases ref, and loop until it does. */
742 && TREE_CODE (def_stmt) == GIMPLE_MODIFY_STMT
743 && !get_call_expr_in (def_stmt)
744 && !refs_may_alias_p (ref, GIMPLE_STMT_OPERAND (def_stmt, 0)))
745 def_stmt = get_single_def_stmt_with_phi (ref, def_stmt);
750 /* Lookup a SCCVN reference operation VR in the current hash table.
751 Returns the resulting value number if it exists in the hash table,
752 NULL_TREE otherwise. VNRESULT will be filled in with the actual
753 vn_reference_t stored in the hashtable if something is found. */
756 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
762 slot = htab_find_slot_with_hash (current_info->references, vr,
764 if (!slot && current_info == optimistic_info)
765 slot = htab_find_slot_with_hash (valid_info->references, vr,
770 *vnresult = (vn_reference_t)*slot;
771 return ((vn_reference_t)*slot)->result;
778 /* Lookup a reference operation by it's parts, in the current hash table.
779 Returns the resulting value number if it exists in the hash table,
780 NULL_TREE otherwise. VNRESULT will be filled in with the actual
781 vn_reference_t stored in the hashtable if something is found. */
784 vn_reference_lookup_pieces (VEC (tree, gc) *vuses,
785 VEC (vn_reference_op_s, heap) *operands,
786 vn_reference_t *vnresult)
788 struct vn_reference_s vr1;
793 vr1.vuses = valueize_vuses (vuses);
794 vr1.operands = valueize_refs (operands);
795 vr1.hashcode = vn_reference_compute_hash (&vr1);
796 result = vn_reference_lookup_1 (&vr1, vnresult);
801 /* Lookup OP in the current hash table, and return the resulting value
802 number if it exists in the hash table. Return NULL_TREE if it does
803 not exist in the hash table or if the result field of the structure
804 was NULL.. VNRESULT will be filled in with the vn_reference_t
805 stored in the hashtable if one exists. */
808 vn_reference_lookup (tree op, VEC (tree, gc) *vuses, bool maywalk,
809 vn_reference_t *vnresult)
811 struct vn_reference_s vr1;
812 tree result, def_stmt;
816 vr1.vuses = valueize_vuses (vuses);
817 vr1.operands = valueize_refs (shared_reference_ops_from_ref (op));
818 vr1.hashcode = vn_reference_compute_hash (&vr1);
819 result = vn_reference_lookup_1 (&vr1, vnresult);
821 /* If there is a single defining statement for all virtual uses, we can
822 use that, following virtual use-def chains. */
826 && VEC_length (tree, vr1.vuses) >= 1
827 && !get_call_expr_in (op)
828 && (def_stmt = get_def_ref_stmt_vuses (op, vr1.vuses))
829 && TREE_CODE (def_stmt) == GIMPLE_MODIFY_STMT
830 /* If there is a call involved, op must be assumed to
832 && !get_call_expr_in (def_stmt))
834 /* We are now at an aliasing definition for the vuses we want to
835 look up. Re-do the lookup with the vdefs for this stmt. */
836 vdefs_to_vec (def_stmt, &vuses);
837 vr1.vuses = valueize_vuses (vuses);
838 vr1.hashcode = vn_reference_compute_hash (&vr1);
839 result = vn_reference_lookup_1 (&vr1, vnresult);
846 /* Insert OP into the current hash table with a value number of
847 RESULT, and return the resulting reference structure we created. */
850 vn_reference_insert (tree op, tree result, VEC (tree, gc) *vuses)
855 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
856 if (TREE_CODE (result) == SSA_NAME)
857 vr1->value_id = VN_INFO (result)->value_id;
859 vr1->value_id = get_or_alloc_constant_value_id (result);
860 vr1->vuses = valueize_vuses (vuses);
861 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
862 vr1->hashcode = vn_reference_compute_hash (vr1);
863 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
865 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
868 /* Because we lookup stores using vuses, and value number failures
869 using the vdefs (see visit_reference_op_store for how and why),
870 it's possible that on failure we may try to insert an already
871 inserted store. This is not wrong, there is no ssa name for a
872 store that we could use as a differentiator anyway. Thus, unlike
873 the other lookup functions, you cannot gcc_assert (!*slot)
876 /* But free the old slot in case of a collision. */
878 free_reference (*slot);
884 /* Insert a reference by it's pieces into the current hash table with
885 a value number of RESULT. Return the resulting reference
886 structure we created. */
889 vn_reference_insert_pieces (VEC (tree, gc) *vuses,
890 VEC (vn_reference_op_s, heap) *operands,
891 tree result, unsigned int value_id)
897 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
898 vr1->value_id = value_id;
899 vr1->vuses = valueize_vuses (vuses);
900 vr1->operands = valueize_refs (operands);
901 vr1->hashcode = vn_reference_compute_hash (vr1);
902 if (result && TREE_CODE (result) == SSA_NAME)
903 result = SSA_VAL (result);
904 vr1->result = result;
906 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
909 /* At this point we should have all the things inserted that we have
910 seen before, and we should never try inserting something that
914 free_reference (*slot);
920 /* Compute and return the hash value for nary operation VBO1. */
923 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
928 for (i = 0; i < vno1->length; ++i)
929 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
930 vno1->op[i] = SSA_VAL (vno1->op[i]);
932 if (vno1->length == 2
933 && commutative_tree_code (vno1->opcode)
934 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
936 tree temp = vno1->op[0];
937 vno1->op[0] = vno1->op[1];
941 for (i = 0; i < vno1->length; ++i)
942 hash += iterative_hash_expr (vno1->op[i], vno1->opcode);
947 /* Return the computed hashcode for nary operation P1. */
950 vn_nary_op_hash (const void *p1)
952 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
953 return vno1->hashcode;
956 /* Compare nary operations P1 and P2 and return true if they are
960 vn_nary_op_eq (const void *p1, const void *p2)
962 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
963 const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
966 if (vno1->opcode != vno2->opcode
967 || vno1->type != vno2->type)
970 for (i = 0; i < vno1->length; ++i)
971 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
977 /* Lookup a n-ary operation by its pieces and return the resulting value
978 number if it exists in the hash table. Return NULL_TREE if it does
979 not exist in the hash table or if the result field of the operation
980 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
984 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
985 tree type, tree op0, tree op1, tree op2,
986 tree op3, vn_nary_op_t *vnresult)
989 struct vn_nary_op_s vno1;
993 vno1.length = length;
999 vno1.hashcode = vn_nary_op_compute_hash (&vno1);
1000 slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
1002 if (!slot && current_info == optimistic_info)
1003 slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
1008 *vnresult = (vn_nary_op_t)*slot;
1009 return ((vn_nary_op_t)*slot)->result;
1012 /* Lookup OP in the current hash table, and return the resulting value
1013 number if it exists in the hash table. Return NULL_TREE if it does
1014 not exist in the hash table or if the result field of the operation
1015 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
1019 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
1022 struct vn_nary_op_s vno1;
1027 vno1.opcode = TREE_CODE (op);
1028 vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
1029 vno1.type = TREE_TYPE (op);
1030 for (i = 0; i < vno1.length; ++i)
1031 vno1.op[i] = TREE_OPERAND (op, i);
1032 vno1.hashcode = vn_nary_op_compute_hash (&vno1);
1033 slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
1035 if (!slot && current_info == optimistic_info)
1036 slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
1041 *vnresult = (vn_nary_op_t)*slot;
1042 return ((vn_nary_op_t)*slot)->result;
1045 /* Insert a n-ary operation into the current hash table using it's
1046 pieces. Return the vn_nary_op_t structure we created and put in
1050 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
1051 tree type, tree op0,
1052 tree op1, tree op2, tree op3,
1054 unsigned int value_id)
1059 vno1 = (vn_nary_op_t) obstack_alloc (¤t_info->nary_obstack,
1060 (sizeof (struct vn_nary_op_s)
1061 - sizeof (tree) * (4 - length)));
1062 vno1->value_id = value_id;
1063 vno1->opcode = code;
1064 vno1->length = length;
1074 vno1->result = result;
1075 vno1->hashcode = vn_nary_op_compute_hash (vno1);
1076 slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
1078 gcc_assert (!*slot);
1085 /* Insert OP into the current hash table with a value number of
1086 RESULT. Return the vn_nary_op_t structure we created and put in
1090 vn_nary_op_insert (tree op, tree result)
1092 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
1097 vno1 = (vn_nary_op_t) obstack_alloc (¤t_info->nary_obstack,
1098 (sizeof (struct vn_nary_op_s)
1099 - sizeof (tree) * (4 - length)));
1100 vno1->value_id = VN_INFO (result)->value_id;
1101 vno1->opcode = TREE_CODE (op);
1102 vno1->length = length;
1103 vno1->type = TREE_TYPE (op);
1104 for (i = 0; i < vno1->length; ++i)
1105 vno1->op[i] = TREE_OPERAND (op, i);
1106 vno1->result = result;
1107 vno1->hashcode = vn_nary_op_compute_hash (vno1);
1108 slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
1110 gcc_assert (!*slot);
1116 /* Compute a hashcode for PHI operation VP1 and return it. */
1118 static inline hashval_t
1119 vn_phi_compute_hash (vn_phi_t vp1)
1121 hashval_t result = 0;
1125 result = vp1->block->index;
1127 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
1129 if (phi1op == VN_TOP)
1131 result += iterative_hash_expr (phi1op, result);
1137 /* Return the computed hashcode for phi operation P1. */
1140 vn_phi_hash (const void *p1)
1142 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
1143 return vp1->hashcode;
1146 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
1149 vn_phi_eq (const void *p1, const void *p2)
1151 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
1152 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
1154 if (vp1->block == vp2->block)
1159 /* Any phi in the same block will have it's arguments in the
1160 same edge order, because of how we store phi nodes. */
1161 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
1163 tree phi2op = VEC_index (tree, vp2->phiargs, i);
1164 if (phi1op == VN_TOP || phi2op == VN_TOP)
1166 if (!expressions_equal_p (phi1op, phi2op))
1174 static VEC(tree, heap) *shared_lookup_phiargs;
1176 /* Lookup PHI in the current hash table, and return the resulting
1177 value number if it exists in the hash table. Return NULL_TREE if
1178 it does not exist in the hash table. */
1181 vn_phi_lookup (tree phi)
1184 struct vn_phi_s vp1;
1187 VEC_truncate (tree, shared_lookup_phiargs, 0);
1189 /* Canonicalize the SSA_NAME's to their value number. */
1190 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
1192 tree def = PHI_ARG_DEF (phi, i);
1193 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
1194 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
1196 vp1.phiargs = shared_lookup_phiargs;
1197 vp1.block = bb_for_stmt (phi);
1198 vp1.hashcode = vn_phi_compute_hash (&vp1);
1199 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
1201 if (!slot && current_info == optimistic_info)
1202 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
1206 return ((vn_phi_t)*slot)->result;
1209 /* Insert PHI into the current hash table with a value number of
1213 vn_phi_insert (tree phi, tree result)
1216 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
1218 VEC (tree, heap) *args = NULL;
1220 /* Canonicalize the SSA_NAME's to their value number. */
1221 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
1223 tree def = PHI_ARG_DEF (phi, i);
1224 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
1225 VEC_safe_push (tree, heap, args, def);
1227 vp1->value_id = VN_INFO (result)->value_id;
1228 vp1->phiargs = args;
1229 vp1->block = bb_for_stmt (phi);
1230 vp1->result = result;
1231 vp1->hashcode = vn_phi_compute_hash (vp1);
1233 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
1236 /* Because we iterate over phi operations more than once, it's
1237 possible the slot might already exist here, hence no assert.*/
1243 /* Print set of components in strongly connected component SCC to OUT. */
1246 print_scc (FILE *out, VEC (tree, heap) *scc)
1251 fprintf (out, "SCC consists of: ");
1252 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1254 print_generic_expr (out, var, 0);
1257 fprintf (out, "\n");
1260 /* Set the value number of FROM to TO, return true if it has changed
1264 set_ssa_val_to (tree from, tree to)
1269 && TREE_CODE (to) == SSA_NAME
1270 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
1273 /* The only thing we allow as value numbers are VN_TOP, ssa_names
1274 and invariants. So assert that here. */
1275 gcc_assert (to != NULL_TREE
1277 || TREE_CODE (to) == SSA_NAME
1278 || is_gimple_min_invariant (to)));
1280 if (dump_file && (dump_flags & TDF_DETAILS))
1282 fprintf (dump_file, "Setting value number of ");
1283 print_generic_expr (dump_file, from, 0);
1284 fprintf (dump_file, " to ");
1285 print_generic_expr (dump_file, to, 0);
1286 fprintf (dump_file, "\n");
1289 currval = SSA_VAL (from);
1291 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
1293 SSA_VAL (from) = to;
1299 /* Set all definitions in STMT to value number to themselves.
1300 Return true if a value number changed. */
1303 defs_to_varying (tree stmt)
1305 bool changed = false;
/* A def valued to itself is the lattice bottom ("varying") in this
   implementation.  */
1309 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
1311 tree def = DEF_FROM_PTR (defp);
1313 VN_INFO (def)->use_processed = true;
1314 changed |= set_ssa_val_to (def, def);
1319 static bool expr_has_constants (tree expr);
1320 static tree try_to_simplify (tree stmt, tree rhs);
1322 /* Visit a copy between LHS and RHS, return true if the value number
1326 visit_copy (tree lhs, tree rhs)
1329 /* Follow chains of copies to their destination. */
/* Stops when RHS is its own value or the value is not an SSA name
   (i.e. an invariant), so LHS gets the chain's final representative.  */
1330 while (SSA_VAL (rhs) != rhs && TREE_CODE (SSA_VAL (rhs)) == SSA_NAME)
1331 rhs = SSA_VAL (rhs);
1333 /* The copy may have a more interesting constant filled expression
1334 (we don't, since we know our RHS is just an SSA name). */
1335 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
1336 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
1338 return set_ssa_val_to (lhs, rhs);
/* NOTE(review): listing gaps — the if/else around the lookup result and the
   return are not visible here.  */
1341 /* Visit a unary operator RHS, value number it, and return true if the
1342 value number of LHS has changed as a result. */
1345 visit_unary_op (tree lhs, tree op)
1347 bool changed = false;
1348 tree result = vn_nary_op_lookup (op, NULL);
/* Hit: LHS joins the existing value class.  */
1352 changed = set_ssa_val_to (lhs, result);
/* Miss: LHS becomes its own value and the expression is recorded.  */
1356 changed = set_ssa_val_to (lhs, lhs);
1357 vn_nary_op_insert (op, lhs);
/* Mirrors visit_unary_op: both dispatch through the shared n-ary table.  */
1363 /* Visit a binary operator RHS, value number it, and return true if the
1364 value number of LHS has changed as a result. */
1367 visit_binary_op (tree lhs, tree op)
1369 bool changed = false;
1370 tree result = vn_nary_op_lookup (op, NULL);
/* Hit: reuse the previously numbered result.  */
1374 changed = set_ssa_val_to (lhs, result);
/* Miss: number LHS to itself and insert the expression.  */
1378 changed = set_ssa_val_to (lhs, lhs);
1379 vn_nary_op_insert (op, lhs);
/* NOTE(review): listing gaps — several enclosing if-conditions and braces
   in this function are not visible here.  */
1385 /* Visit a load from a reference operator RHS, part of STMT, value number it,
1386 and return true if the value number of the LHS has changed as a result. */
1389 visit_reference_op_load (tree lhs, tree op, tree stmt)
1391 bool changed = false;
1392 tree result = vn_reference_lookup (op, shared_vuses_from_stmt (stmt), true,
1395 /* We handle type-punning through unions by value-numbering based
1396 on offset and size of the access. Be prepared to handle a
1397 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
1399 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
1401 /* We will be setting the value number of lhs to the value number
1402 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
1403 So first simplify and lookup this expression to see if it
1404 is already available. */
1405 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
1407 && !is_gimple_min_invariant (val)
1408 && TREE_CODE (val) != SSA_NAME)
1410 tree tem = try_to_simplify (stmt, val);
/* If simplification still did not yield an invariant or a name, try the
   n-ary table for an existing value of the VIEW_CONVERT expression.  */
1415 if (!is_gimple_min_invariant (val)
1416 && TREE_CODE (val) != SSA_NAME)
1417 result = vn_nary_op_lookup (val, NULL);
1418 /* If the expression is not yet available, value-number lhs to
1419 a new SSA_NAME we create. */
/* may_insert gates insertion of new names (only enabled by callers that
   can cope with IL growth, e.g. PRE).  */
1420 if (!result && may_insert)
1422 result = make_ssa_name (SSA_NAME_VAR (lhs), NULL_TREE);
1423 /* Initialize value-number information properly. */
1424 VN_INFO_GET (result)->valnum = result;
1425 VN_INFO (result)->expr = val;
1426 VN_INFO (result)->has_constants = expr_has_constants (val);
1427 VN_INFO (result)->needs_insertion = true;
1428 /* As all "inserted" statements are singleton SCCs, insert
1429 to the valid table. This is strictly needed to
1430 avoid re-generating new value SSA_NAMEs for the same
1431 expression during SCC iteration over and over (the
1432 optimistic table gets cleared after each iteration).
1433 We do not need to insert into the optimistic table, as
1434 lookups there will fall back to the valid table. */
1435 if (current_info == optimistic_info)
1437 current_info = valid_info;
1438 vn_nary_op_insert (val, result);
1439 current_info = optimistic_info;
1442 vn_nary_op_insert (val, result);
1443 if (dump_file && (dump_flags & TDF_DETAILS))
1445 fprintf (dump_file, "Inserting name ");
1446 print_generic_expr (dump_file, result, 0);
1447 fprintf (dump_file, " for expression ");
1448 print_generic_expr (dump_file, val, 0);
1449 fprintf (dump_file, "\n");
/* Lookup succeeded (possibly after conversion handling): LHS takes the
   found value, and inherits a constant-bearing expression if any.  */
1456 changed = set_ssa_val_to (lhs, result);
1457 if (TREE_CODE (result) == SSA_NAME
1458 && VN_INFO (result)->has_constants)
1460 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
1461 VN_INFO (lhs)->has_constants = true;
/* Lookup failed: LHS is its own value; record the reference for later.  */
1466 changed = set_ssa_val_to (lhs, lhs);
1467 vn_reference_insert (op, lhs, copy_vuses_from_stmt (stmt));
/* NOTE(review): listing gaps — some declarations, braces and the return
   statement of this function are not visible here.  */
1474 /* Visit a store to a reference operator LHS, part of STMT, value number it,
1475 and return true if the value number of the LHS has changed as a result. */
1478 visit_reference_op_store (tree lhs, tree op, tree stmt)
1480 bool changed = false;
1482 bool resultsame = false;
1484 /* First we want to lookup using the *vuses* from the store and see
1485 if there the last store to this location with the same address
1488 The vuses represent the memory state before the store. If the
1489 memory state, address, and value of the store is the same as the
1490 last store to this location, then this store will produce the
1491 same memory state as that store.
1493 In this case the vdef versions for this store are value numbered to those
1494 vuse versions, since they represent the same memory state after
1497 Otherwise, the vdefs for the store are used when inserting into
1498 the table, since the store generates a new memory state. */
1500 result = vn_reference_lookup (lhs, shared_vuses_from_stmt (stmt), false,
/* Canonicalize both sides to their value numbers before comparing, so a
   store of an equivalent value is detected as redundant.  */
1505 if (TREE_CODE (result) == SSA_NAME)
1506 result = SSA_VAL (result);
1507 if (TREE_CODE (op) == SSA_NAME)
1509 resultsame = expressions_equal_p (result, op);
1512 if (!result || !resultsame)
1514 VEC(tree, gc) *vdefs = copy_vdefs_from_stmt (stmt);
1518 if (dump_file && (dump_flags & TDF_DETAILS))
1520 fprintf (dump_file, "No store match\n");
1521 fprintf (dump_file, "Value numbering store ");
1522 print_generic_expr (dump_file, lhs, 0);
1523 fprintf (dump_file, " to ");
1524 print_generic_expr (dump_file, op, 0);
1525 fprintf (dump_file, "\n");
1527 /* Have to set value numbers before insert, since insert is
1528 going to valueize the references in-place. */
1529 for (i = 0; VEC_iterate (tree, vdefs, i, vdef); i++)
1531 VN_INFO (vdef)->use_processed = true;
1532 changed |= set_ssa_val_to (vdef, vdef);
1535 /* Do not insert structure copies into the tables. */
1536 if (is_gimple_min_invariant (op)
1537 || is_gimple_reg (op))
1538 vn_reference_insert (lhs, op, vdefs);
1542 /* We had a match, so value number the vdefs to have the value
1543 number of the vuses they came from. */
1544 ssa_op_iter op_iter;
1548 if (dump_file && (dump_flags & TDF_DETAILS))
1549 fprintf (dump_file, "Store matched earlier value,"
1550 "value numbering store vdefs to matching vuses.\n");
1552 FOR_EACH_SSA_VDEF_OPERAND (var, vv, stmt, op_iter)
1554 tree def = DEF_FROM_PTR (var);
1557 /* Uh, if the vuse is a multiuse, we can't really do much
1558 here, sadly, since we don't know which value number of
1559 which vuse to use. */
1560 if (VUSE_VECT_NUM_ELEM (*vv) != 1)
1563 use = VUSE_ELEMENT_VAR (*vv, 0);
1565 VN_INFO (def)->use_processed = true;
1566 changed |= set_ssa_val_to (def, SSA_VAL (use));
/* NOTE(review): listing gaps — the allsame bookkeeping inside the argument
   loop and several braces are not visible here.  */
1573 /* Visit and value number PHI, return true if the value number
1577 visit_phi (tree phi)
1579 bool changed = false;
1581 tree sameval = VN_TOP;
1582 bool allsame = true;
1585 /* TODO: We could check for this in init_sccvn, and replace this
1586 with a gcc_assert. */
/* Abnormal PHIs cannot be value-numbered to anything else; make the
   result its own value and return.  */
1587 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
1588 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi))
1590 /* See if all non-TOP arguments have the same value. TOP is
1591 equivalent to everything, so we can ignore it. */
1592 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
1594 tree def = PHI_ARG_DEF (phi, i);
1596 if (TREE_CODE (def) == SSA_NAME)
1597 def = SSA_VAL (def);
1600 if (sameval == VN_TOP)
1606 if (!expressions_equal_p (def, sameval))
1614 /* If all value numbered to the same value, the phi node has that
1618 if (is_gimple_min_invariant (sameval))
1620 VN_INFO (PHI_RESULT (phi))->has_constants = true;
1621 VN_INFO (PHI_RESULT (phi))->expr = sameval;
1625 VN_INFO (PHI_RESULT (phi))->has_constants = false;
1626 VN_INFO (PHI_RESULT (phi))->expr = sameval;
/* Degenerate PHI: forward to the single value, as copy or invariant.  */
1629 if (TREE_CODE (sameval) == SSA_NAME)
1630 return visit_copy (PHI_RESULT (phi), sameval);
1632 return set_ssa_val_to (PHI_RESULT (phi), sameval);
1635 /* Otherwise, see if it is equivalent to a phi node in this block. */
1636 result = vn_phi_lookup (phi);
1639 if (TREE_CODE (result) == SSA_NAME)
1640 changed = visit_copy (PHI_RESULT (phi), result);
1642 changed = set_ssa_val_to (PHI_RESULT (phi), result);
/* No equivalent PHI found: record this one and make the result its own
   value number.  */
1646 vn_phi_insert (phi, PHI_RESULT (phi));
1647 VN_INFO (PHI_RESULT (phi))->has_constants = false;
1648 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
1649 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
/* NOTE(review): listing gaps — case labels (tcc_unary/tcc_binary) and the
   default branch are not visible here.  */
1655 /* Return true if EXPR contains constants. */
1658 expr_has_constants (tree expr)
1660 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
/* Unary: only one operand can be constant.  */
1663 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
/* Binary: either operand may be constant.  */
1666 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
1667 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
1668 /* Constants inside reference ops are rarely interesting, but
1669 it can take a lot of looking to find them. */
1671 case tcc_declaration:
1674 return is_gimple_min_invariant (expr);
/* NOTE(review): listing gaps — the case labels and the final return of
   EXPR are not visible here.  */
1679 /* Replace SSA_NAMES in expr with their value numbers, and return the
1681 This is performed in place. */
1684 valueize_expr (tree expr)
1686 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
/* Unary case: substitute operand 0 unless its value is still TOP.  */
1689 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
1690 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
1691 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
/* Binary case: substitute both operands the same way.  */
1694 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
1695 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
1696 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
1697 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
1698 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
1699 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
/* NOTE(review): listing gaps — the early "return NULL_TREE" after the
   nothing-changed check and the final return are not visible here.  */
1707 /* Simplify the binary expression RHS, and return the result if
1711 simplify_binary_expression (tree stmt, tree rhs)
1713 tree result = NULL_TREE;
1714 tree op0 = TREE_OPERAND (rhs, 0);
1715 tree op1 = TREE_OPERAND (rhs, 1);
1717 /* This will not catch every single case we could combine, but will
1718 catch those with constants. The goal here is to simultaneously
1719 combine constants between expressions, but avoid infinite
1720 expansion of expressions during simplification. */
1721 if (TREE_CODE (op0) == SSA_NAME)
/* Prefer the recorded constant-bearing expression; otherwise fall back
   to the plain value number (skipping TOP and self).  */
1723 if (VN_INFO (op0)->has_constants)
1724 op0 = valueize_expr (VN_INFO (op0)->expr);
1725 else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
1726 op0 = SSA_VAL (op0);
1729 if (TREE_CODE (op1) == SSA_NAME)
1731 if (VN_INFO (op1)->has_constants)
1732 op1 = valueize_expr (VN_INFO (op1)->expr);
1733 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
1734 op1 = SSA_VAL (op1);
1737 /* Avoid folding if nothing changed. */
1738 if (op0 == TREE_OPERAND (rhs, 0)
1739 && op1 == TREE_OPERAND (rhs, 1))
/* Overflow warnings are deferred/undeferred around fold so warnings are
   only emitted when the folded result is actually usable.  */
1742 fold_defer_overflow_warnings ();
1744 result = fold_binary (TREE_CODE (rhs), TREE_TYPE (rhs), op0, op1);
1746 fold_undefer_overflow_warnings (result && valid_gimple_expression_p (result),
1749 /* Make sure result is not a complex expression consisting
1750 of operators of operators (IE (a + b) + (a + c))
1751 Otherwise, we will end up with unbounded expressions if
1752 fold does anything at all. */
1753 if (result && valid_gimple_expression_p (result))
/* NOTE(review): listing gaps — early returns, the tem->op0 assignment in
   the conversion branch, and the final return are not visible here.  */
1759 /* Simplify the unary expression RHS, and return the result if
1763 simplify_unary_expression (tree rhs)
1765 tree result = NULL_TREE;
1766 tree op0 = TREE_OPERAND (rhs, 0);
/* Only SSA-name operands can be valueized; bail for anything else.  */
1768 if (TREE_CODE (op0) != SSA_NAME)
1771 if (VN_INFO (op0)->has_constants)
1772 op0 = valueize_expr (VN_INFO (op0)->expr);
1773 else if (CONVERT_EXPR_P (rhs)
1774 || TREE_CODE (rhs) == REALPART_EXPR
1775 || TREE_CODE (rhs) == IMAGPART_EXPR
1776 || TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
1778 /* We want to do tree-combining on conversion-like expressions.
1779 Make sure we feed only SSA_NAMEs or constants to fold though. */
1780 tree tem = valueize_expr (VN_INFO (op0)->expr);
1781 if (UNARY_CLASS_P (tem)
1782 || BINARY_CLASS_P (tem)
1783 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
1784 || TREE_CODE (tem) == SSA_NAME
1785 || is_gimple_min_invariant (tem))
1789 /* Avoid folding if nothing changed, but remember the expression. */
1790 if (op0 == TREE_OPERAND (rhs, 0))
1793 result = fold_unary (TREE_CODE (rhs), TREE_TYPE (rhs), op0);
/* Drop useless conversions fold may have wrapped around the result
   before validating it as a GIMPLE expression.  */
1796 STRIP_USELESS_TYPE_CONVERSION (result);
1797 if (valid_gimple_expression_p (result))
/* NOTE(review): listing gaps — case labels (tcc_reference/tcc_unary etc.),
   result checks after the fold helpers, and the default/return paths are
   not visible here.  */
1804 /* Try to simplify RHS using equivalences and constant folding. */
1807 try_to_simplify (tree stmt, tree rhs)
1811 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
1812 in this case, there is no point in doing extra work. */
1813 if (TREE_CODE (rhs) == SSA_NAME)
/* Dispatch on tree-code class: declarations, references, unary ops and
   binary/comparison ops each get a dedicated simplifier.  */
1816 switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
1818 case tcc_declaration:
1819 tem = get_symbol_constant_value (rhs);
1825 /* Do not do full-blown reference lookup here, but simplify
1826 reads from constant aggregates. */
1827 tem = fold_const_aggregate_ref (rhs);
1831 /* Fallthrough for some codes that can operate on registers. */
1832 if (!(TREE_CODE (rhs) == REALPART_EXPR
1833 || TREE_CODE (rhs) == IMAGPART_EXPR
1834 || TREE_CODE (rhs) == VIEW_CONVERT_EXPR))
1836 /* We could do a little more with unary ops, if they expand
1837 into binary ops, but it's debatable whether it is worth it. */
1839 return simplify_unary_expression (rhs);
1841 case tcc_comparison:
1843 return simplify_binary_expression (stmt, rhs);
/* NOTE(review): listing gaps — many braces, some case labels and the final
   return of this large dispatcher are not visible here.  */
1852 /* Visit and value number USE, return true if the value number
1856 visit_use (tree use)
1858 bool changed = false;
1859 tree stmt = SSA_NAME_DEF_STMT (use);
1862 VN_INFO (use)->use_processed = true;
1864 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
1865 if (dump_file && (dump_flags & TDF_DETAILS)
1866 && !IS_EMPTY_STMT (stmt))
1868 fprintf (dump_file, "Value numbering ");
1869 print_generic_expr (dump_file, use, 0);
1870 fprintf (dump_file, " stmt = ");
1871 print_generic_stmt (dump_file, stmt, 0);
1874 /* RETURN_EXPR may have an embedded MODIFY_STMT. */
1875 if (TREE_CODE (stmt) == RETURN_EXPR
1876 && TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT)
1877 stmt = TREE_OPERAND (stmt, 0);
1879 ann = stmt_ann (stmt);
1881 /* Handle uninitialized uses. */
1882 if (IS_EMPTY_STMT (stmt))
1884 changed = set_ssa_val_to (use, use);
1888 if (TREE_CODE (stmt) == PHI_NODE)
1890 changed = visit_phi (stmt);
/* Non-assignments, volatile statements and possibly-throwing statements
   cannot be value-numbered; all their defs go to varying.  */
1892 else if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
1893 || (ann && ann->has_volatile_ops)
1894 || tree_could_throw_p (stmt))
1896 changed = defs_to_varying (stmt);
1900 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
1901 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
1904 STRIP_USELESS_TYPE_CONVERSION (rhs);
1906 /* Shortcut for copies. Simplifying copies is pointless,
1907 since we copy the expression and value they represent. */
1908 if (TREE_CODE (rhs) == SSA_NAME && TREE_CODE (lhs) == SSA_NAME)
1910 changed = visit_copy (lhs, rhs);
1913 simplified = try_to_simplify (stmt, rhs);
1914 if (simplified && simplified != rhs)
1916 if (dump_file && (dump_flags & TDF_DETAILS))
1918 fprintf (dump_file, "RHS ");
1919 print_generic_expr (dump_file, rhs, 0);
1920 fprintf (dump_file, " simplified to ");
1921 print_generic_expr (dump_file, simplified, 0);
1922 if (TREE_CODE (lhs) == SSA_NAME)
1923 fprintf (dump_file, " has constants %d\n",
1924 expr_has_constants (simplified));
1926 fprintf (dump_file, "\n");
1929 /* Setting value numbers to constants will occasionally
1930 screw up phi congruence because constants are not
1931 uniquely associated with a single ssa name that can be
1933 if (simplified && is_gimple_min_invariant (simplified)
1934 && TREE_CODE (lhs) == SSA_NAME
1935 && simplified != rhs)
1937 VN_INFO (lhs)->expr = simplified;
1938 VN_INFO (lhs)->has_constants = true;
1939 changed = set_ssa_val_to (lhs, simplified);
/* Simplified to another SSA name: treat as a copy.  */
1942 else if (simplified && TREE_CODE (simplified) == SSA_NAME
1943 && TREE_CODE (lhs) == SSA_NAME)
1945 changed = visit_copy (lhs, simplified);
1948 else if (simplified)
1950 if (TREE_CODE (lhs) == SSA_NAME)
1952 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
1953 /* We have to unshare the expression or else
1954 valuizing may change the IL stream. */
1955 VN_INFO (lhs)->expr = unshare_expr (simplified);
1959 else if (expr_has_constants (rhs) && TREE_CODE (lhs) == SSA_NAME)
1961 VN_INFO (lhs)->has_constants = true;
1962 VN_INFO (lhs)->expr = unshare_expr (rhs);
1964 else if (TREE_CODE (lhs) == SSA_NAME)
1966 /* We reset expr and constantness here because we may
1967 have been value numbering optimistically, and
1968 iterating. They may become non-constant in this case,
1969 even if they were optimistically constant. */
1971 VN_INFO (lhs)->has_constants = false;
1972 VN_INFO (lhs)->expr = lhs;
1975 if (TREE_CODE (lhs) == SSA_NAME
1976 /* We can substitute SSA_NAMEs that are live over
1977 abnormal edges with their constant value. */
1978 && !is_gimple_min_invariant (rhs)
1979 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
1980 changed = defs_to_varying (stmt);
1981 else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
1983 changed = visit_reference_op_store (lhs, rhs, stmt);
1985 else if (TREE_CODE (lhs) == SSA_NAME)
1987 if (is_gimple_min_invariant (rhs))
1989 VN_INFO (lhs)->has_constants = true;
1990 VN_INFO (lhs)->expr = rhs;
1991 changed = set_ssa_val_to (lhs, rhs);
/* Otherwise dispatch by RHS code class to the specific visitor.  */
1995 switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
1998 changed = visit_unary_op (lhs, rhs);
2001 changed = visit_binary_op (lhs, rhs);
2003 /* If tcc_vl_expr ever encompasses more than
2004 CALL_EXPR, this will need to be changed. */
/* Only pure/const calls can be value-numbered like loads.  */
2006 if (call_expr_flags (rhs) & (ECF_PURE | ECF_CONST))
2007 changed = visit_reference_op_load (lhs, rhs, stmt);
2009 changed = defs_to_varying (stmt);
2011 case tcc_declaration:
2013 changed = visit_reference_op_load (lhs, rhs, stmt);
2015 case tcc_expression:
2016 if (TREE_CODE (rhs) == ADDR_EXPR)
2018 changed = visit_unary_op (lhs, rhs);
2023 changed = defs_to_varying (stmt);
2029 changed = defs_to_varying (stmt);
/* NOTE(review): listing gaps — the returns for the empty-stmt and PHI
   ordering cases, and the same-block test, are not visible here.  */
2036 /* Compare two operands by reverse postorder index */
2039 compare_ops (const void *pa, const void *pb)
2041 const tree opa = *((const tree *)pa);
2042 const tree opb = *((const tree *)pb);
2043 tree opstmta = SSA_NAME_DEF_STMT (opa);
2044 tree opstmtb = SSA_NAME_DEF_STMT (opb);
/* Names with empty defining statements (e.g. default defs) sort first.  */
2048 if (IS_EMPTY_STMT (opstmta) && IS_EMPTY_STMT (opstmtb))
2050 else if (IS_EMPTY_STMT (opstmta))
2052 else if (IS_EMPTY_STMT (opstmtb))
2055 bba = bb_for_stmt (opstmta);
2056 bbb = bb_for_stmt (opstmtb);
/* Same block: PHIs sort before real statements, then by statement uid.  */
2067 if (TREE_CODE (opstmta) == PHI_NODE && TREE_CODE (opstmtb) == PHI_NODE)
2069 else if (TREE_CODE (opstmta) == PHI_NODE)
2071 else if (TREE_CODE (opstmtb) == PHI_NODE)
2073 return gimple_stmt_uid (opstmta) - gimple_stmt_uid (opstmtb);
/* Different blocks: order by RPO number of the containing block.  */
2075 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
2078 /* Sort an array containing members of a strongly connected component
2079 SCC so that the members are ordered by RPO number.
2080 This means that when the sort is complete, iterating through the
2081 array will give you the members in RPO order. */
2084 sort_scc (VEC (tree, heap) *scc)
/* qsort over the VEC's backing array; comparator is compare_ops above.  */
2086 qsort (VEC_address (tree, scc),
2087 VEC_length (tree, scc),
/* NOTE(review): listing gaps — the multi-member else branch's opening, the
   do/while structure of the iteration, and visit_use in the final loop are
   not visible here.  */
2095 /* Process a strongly connected component in the SSA graph. */
2098 process_scc (VEC (tree, heap) *scc)
2097 /* If the SCC has a single member, just visit it. */
2099 if (VEC_length (tree, scc) == 1)
2101 tree use = VEC_index (tree, scc, 0);
2102 if (!VN_INFO (use)->use_processed)
2109 unsigned int iterations = 0;
2110 bool changed = true;
2112 /* Iterate over the SCC with the optimistic table until it stops
2114 current_info = optimistic_info;
/* Each iteration starts from a clean optimistic state: empty the hash
   tables and release their backing storage before re-visiting.  */
2119 htab_empty (optimistic_info->nary);
2120 htab_empty (optimistic_info->phis);
2121 htab_empty (optimistic_info->references);
2122 obstack_free (&optimistic_info->nary_obstack, NULL);
2123 gcc_obstack_init (&optimistic_info->nary_obstack);
2124 empty_alloc_pool (optimistic_info->phis_pool);
2125 empty_alloc_pool (optimistic_info->references_pool);
2126 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2127 changed |= visit_use (var);
2130 statistics_histogram_event (cfun, "SCC iterations", iterations);
2132 /* Finally, visit the SCC once using the valid table. */
2133 current_info = valid_info;
2134 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2139 DEF_VEC_O(ssa_op_iter);
2140 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
/* NOTE(review): listing gaps — the do-loop opening, the "return false"
   on the size bailout, sort_scc/process_scc calls and the final return
   are not visible here.  */
2142 /* Pop the components of the found SCC for NAME off the SCC stack
2143 and process them. Returns true if all went well, false if
2144 we run into resource limits. */
2147 extract_and_process_scc_for_name (tree name)
2149 VEC (tree, heap) *scc = NULL;
2152 /* Found an SCC, pop the components off the SCC stack and
/* Pop until NAME itself comes off; everything above it on the stack
   belongs to this SCC (Tarjan invariant).  */
2156 x = VEC_pop (tree, sccstack);
2158 VN_INFO (x)->on_sccstack = false;
2159 VEC_safe_push (tree, heap, scc, x);
2160 } while (x != name);
2162 /* Bail out of SCCVN in case a SCC turns out to be incredibly large. */
2163 if (VEC_length (tree, scc)
2164 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
2167 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
2168 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
2169 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
/* Multi-member SCCs are sorted into RPO order before processing.  */
2173 if (VEC_length (tree, scc) > 1)
2176 if (dump_file && (dump_flags & TDF_DETAILS))
2177 print_scc (dump_file, scc);
2181 VEC_free (tree, heap, scc);
/* NOTE(review): the function header line (presumably "static bool
   DFS (tree name)") and several declarations/labels (e.g. the
   continue_walking target) are missing from this listing.  */
2186 /* Depth first search on NAME to discover and process SCC's in the SSA
2188 Execution of this algorithm relies on the fact that the SCC's are
2189 popped off the stack in topological order.
2190 Returns true if successful, false if we stopped processing SCC's due
2191 to resource constraints. */
/* Iterative Tarjan SCC: explicit stacks of names and operand iterators
   replace recursion to avoid deep call stacks on large SSA graphs.  */
2196 VEC(ssa_op_iter, heap) *itervec = NULL;
2197 VEC(tree, heap) *namevec = NULL;
2198 use_operand_p usep = NULL;
2204 VN_INFO (name)->dfsnum = next_dfs_num++;
2205 VN_INFO (name)->visited = true;
2206 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
2208 VEC_safe_push (tree, heap, sccstack, name);
2209 VN_INFO (name)->on_sccstack = true;
2210 defstmt = SSA_NAME_DEF_STMT (name);
2212 /* Recursively DFS on our operands, looking for SCC's. */
2213 if (!IS_EMPTY_STMT (defstmt))
2215 /* Push a new iterator. */
2216 if (TREE_CODE (defstmt) == PHI_NODE)
2217 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES)
2219 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
2226 /* If we are done processing uses of a name, go up the stack
2227 of iterators and process SCCs as we found them. */
2228 if (op_iter_done (&iter))
2230 /* See if we found an SCC. */
2231 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
2232 if (!extract_and_process_scc_for_name (name))
2234 VEC_free (tree, heap, namevec);
2235 VEC_free (ssa_op_iter, heap, itervec);
2239 /* Check if we are done. */
2240 if (VEC_empty (tree, namevec))
2242 VEC_free (tree, heap, namevec);
2243 VEC_free (ssa_op_iter, heap, itervec);
2247 /* Restore the last use walker and continue walking there. */
2249 name = VEC_pop (tree, namevec);
2250 memcpy (&iter, VEC_last (ssa_op_iter, itervec),
2251 sizeof (ssa_op_iter));
2252 VEC_pop (ssa_op_iter, itervec);
2253 goto continue_walking;
2256 use = USE_FROM_PTR (usep);
2258 /* Since we handle phi nodes, we will sometimes get
2259 invariants in the use expression. */
2260 if (TREE_CODE (use) == SSA_NAME)
2262 if (! (VN_INFO (use)->visited))
2264 /* Recurse by pushing the current use walking state on
2265 the stack and starting over. */
2266 VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
2267 VEC_safe_push(tree, heap, namevec, name);
/* After the simulated recursion returns, propagate the child's low
   link up to NAME.  */
2272 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
2273 VN_INFO (use)->low);
/* Back edge to a node still on the SCC stack lowers NAME's low link.  */
2275 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
2276 && VN_INFO (use)->on_sccstack)
2278 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
2279 VN_INFO (name)->low);
2283 usep = op_iter_next_use (&iter);
2287 /* Allocate a value number table. */
2290 allocate_vn_table (vn_tables_t table)
/* Three hash tables (phis, n-ary ops, references) plus their backing
   allocators: an obstack for n-ary ops and alloc-pools for the rest.  */
2292 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
2293 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
2294 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
2297 gcc_obstack_init (&table->nary_obstack);
2298 table->phis_pool = create_alloc_pool ("VN phis",
2299 sizeof (struct vn_phi_s),
2301 table->references_pool = create_alloc_pool ("VN references",
2302 sizeof (struct vn_reference_s),
2306 /* Free a value number table. */
2309 free_vn_table (vn_tables_t table)
/* Releases everything allocate_vn_table created, in matching order.  */
2311 htab_delete (table->phis);
2312 htab_delete (table->nary);
2313 htab_delete (table->references);
2314 obstack_free (&table->nary_obstack, NULL);
2315 free_alloc_pool (table->phis_pool);
2316 free_alloc_pool (table->references_pool);
/* NOTE(review): the function header (presumably "static void
   init_scc_vn (void)") and some declarations are missing from this
   listing; what follows is the initializer for the SCCVN pass state.  */
2324 int *rpo_numbers_temp;
2326 calculate_dominance_info (CDI_DOMINATORS);
2328 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
2331 constant_value_ids = BITMAP_ALLOC (NULL);
2336 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
2337 /* VEC_alloc doesn't actually grow it to the right size, it just
2338 preallocates the space to do so. */
2339 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
2340 gcc_obstack_init (&vn_ssa_aux_obstack);
2342 shared_lookup_phiargs = NULL;
2343 shared_lookup_vops = NULL;
2344 shared_lookup_references = NULL;
2345 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
2346 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
2347 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
2349 /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
2350 the i'th block in RPO order is bb. We want to map bb's to RPO
2351 numbers, so we need to rearrange this array. */
2352 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
2353 rpo_numbers[rpo_numbers_temp[j]] = j;
2355 XDELETE (rpo_numbers_temp);
/* VN_TOP is a distinguished tree used as the lattice top element.  */
2357 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
2359 /* Create the VN_INFO structures, and initialize value numbers to
2361 for (i = 0; i < num_ssa_names; i++)
2363 tree name = ssa_name (i);
2366 VN_INFO_GET (name)->valnum = VN_TOP;
2367 VN_INFO (name)->expr = name;
2368 VN_INFO (name)->value_id = 0;
2372 renumber_gimple_stmt_uids ();
2374 /* Create the valid and optimistic value numbering tables. */
2375 valid_info = XCNEW (struct vn_tables_s);
2376 allocate_vn_table (valid_info);
2377 optimistic_info = XCNEW (struct vn_tables_s);
2378 allocate_vn_table (optimistic_info);
/* Allocate a fresh value-number table for the PRE phase and make it the
   current lookup/insert target.  */
2383 switch_to_PRE_table (void)
2385 pre_info = XCNEW (struct vn_tables_s);
2386 allocate_vn_table (pre_info);
2387 current_info = pre_info;
/* NOTE(review): the function header (presumably "void free_scc_vn (void)")
   and some if-conditions in the per-name loop are missing from this
   listing; this is the teardown matching init_scc_vn.  */
2395 htab_delete (constant_to_value_id);
2396 BITMAP_FREE (constant_value_ids);
2397 VEC_free (tree, heap, shared_lookup_phiargs);
2398 VEC_free (tree, gc, shared_lookup_vops);
2399 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
2400 XDELETEVEC (rpo_numbers);
2402 for (i = 0; i < num_ssa_names; i++)
2404 tree name = ssa_name (i);
/* Clear cached SSA_NAME_VALUEs and release names this pass inserted.  */
2406 && SSA_NAME_VALUE (name))
2407 SSA_NAME_VALUE (name) = NULL;
2409 && VN_INFO (name)->needs_insertion)
2410 release_ssa_name (name);
2412 obstack_free (&vn_ssa_aux_obstack, NULL);
2413 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
2415 VEC_free (tree, heap, sccstack);
2416 free_vn_table (valid_info);
2417 XDELETE (valid_info);
2418 free_vn_table (optimistic_info);
2419 XDELETE (optimistic_info);
/* pre_info is freed here too when switch_to_PRE_table was used.  */
2422 free_vn_table (pre_info);
/* NOTE(review): listing gaps — local declarations (vno/vp/vr, htab
   iterator) are not visible here.  */
2427 /* Set the value ids in the valid/optimistic hash tables. */
2430 set_hashtable_value_ids (void)
2437 /* Now set the value ids of the things we had put in the hash
/* Same pattern six times: for each entry, an SSA_NAME result takes the
   name's value id, an invariant result takes a constant value id.  */
2440 FOR_EACH_HTAB_ELEMENT (valid_info->nary,
2441 vno, vn_nary_op_t, hi)
2445 if (TREE_CODE (vno->result) == SSA_NAME)
2446 vno->value_id = VN_INFO (vno->result)->value_id;
2447 else if (is_gimple_min_invariant (vno->result))
2448 vno->value_id = get_or_alloc_constant_value_id (vno->result);
2452 FOR_EACH_HTAB_ELEMENT (optimistic_info->nary,
2453 vno, vn_nary_op_t, hi)
2457 if (TREE_CODE (vno->result) == SSA_NAME)
2458 vno->value_id = VN_INFO (vno->result)->value_id;
2459 else if (is_gimple_min_invariant (vno->result))
2460 vno->value_id = get_or_alloc_constant_value_id (vno->result);
2464 FOR_EACH_HTAB_ELEMENT (valid_info->phis,
2469 if (TREE_CODE (vp->result) == SSA_NAME)
2470 vp->value_id = VN_INFO (vp->result)->value_id;
2471 else if (is_gimple_min_invariant (vp->result))
2472 vp->value_id = get_or_alloc_constant_value_id (vp->result);
2475 FOR_EACH_HTAB_ELEMENT (optimistic_info->phis,
2480 if (TREE_CODE (vp->result) == SSA_NAME)
2481 vp->value_id = VN_INFO (vp->result)->value_id;
2482 else if (is_gimple_min_invariant (vp->result))
2483 vp->value_id = get_or_alloc_constant_value_id (vp->result);
2488 FOR_EACH_HTAB_ELEMENT (valid_info->references,
2489 vr, vn_reference_t, hi)
2493 if (TREE_CODE (vr->result) == SSA_NAME)
2494 vr->value_id = VN_INFO (vr->result)->value_id;
2495 else if (is_gimple_min_invariant (vr->result))
2496 vr->value_id = get_or_alloc_constant_value_id (vr->result);
2499 FOR_EACH_HTAB_ELEMENT (optimistic_info->references,
2500 vr, vn_reference_t, hi)
2504 if (TREE_CODE (vr->result) == SSA_NAME)
2505 vr->value_id = VN_INFO (vr->result)->value_id;
2506 else if (is_gimple_min_invariant (vr->result))
2507 vr->value_id = get_or_alloc_constant_value_id (vr->result);
/* NOTE(review): listing gaps — init_scc_vn call, the DFS bailout path,
   the while (changed) wrapper of the propagation loop, and the final
   "return true" are not visible here.  */
2512 /* Do SCCVN. Returns true if it finished, false if we bailed out
2513 due to resource constraints. */
2516 run_scc_vn (bool may_insert_arg)
2520 bool changed = true;
2522 may_insert = may_insert_arg;
2525 current_info = valid_info;
/* Default definitions of parameters value-number to themselves.  */
2527 for (param = DECL_ARGUMENTS (current_function_decl);
2529 param = TREE_CHAIN (param))
2531 if (gimple_default_def (cfun, param) != NULL)
2533 tree def = gimple_default_def (cfun, param);
2534 SSA_VAL (def) = def;
/* Drive the SCC discovery from every unvisited, used SSA name.  */
2538 for (i = 1; i < num_ssa_names; ++i)
2540 tree name = ssa_name (i);
2542 && VN_INFO (name)->visited == false
2543 && !has_zero_uses (name))
2552 /* Initialize the value ids. */
2554 for (i = 1; i < num_ssa_names; ++i)
2556 tree name = ssa_name (i);
2560 info = VN_INFO (name);
/* Class leaders (valnum == name) get fresh ids; invariant values share
   a per-constant id.  */
2561 if (info->valnum == name)
2562 info->value_id = get_next_value_id ();
2563 else if (is_gimple_min_invariant (info->valnum))
2564 info->value_id = get_or_alloc_constant_value_id (info->valnum);
2567 /* Propagate until they stop changing. */
2571 for (i = 1; i < num_ssa_names; ++i)
2573 tree name = ssa_name (i);
2577 info = VN_INFO (name);
/* NOTE(review): the SSA_NAME check on info->valnum appears twice in this
   condition (lines 2578 and 2580) — redundant but harmless.  */
2578 if (TREE_CODE (info->valnum) == SSA_NAME
2579 && info->valnum != name
2580 && TREE_CODE (info->valnum) == SSA_NAME
2581 && info->value_id != VN_INFO (info->valnum)->value_id)
2584 info->value_id = VN_INFO (info->valnum)->value_id;
2589 set_hashtable_value_ids ();
2591 if (dump_file && (dump_flags & TDF_DETAILS))
2593 fprintf (dump_file, "Value numbers:\n");
2594 for (i = 0; i < num_ssa_names; i++)
2596 tree name = ssa_name (i);
2597 if (name && VN_INFO (name)->visited
2598 && (SSA_VAL (name) != name
2599 || is_gimple_min_invariant (VN_INFO (name)->expr)))
2601 print_generic_expr (dump_file, name, 0);
2602 fprintf (dump_file, " = ");
2603 if (is_gimple_min_invariant (VN_INFO (name)->expr))
2604 print_generic_expr (dump_file, VN_INFO (name)->expr, 0);
2606 print_generic_expr (dump_file, SSA_VAL (name), 0);
2607 fprintf (dump_file, "\n");
2616 /* Return the maximum value id we have ever seen. */
2619 get_max_value_id (void)
/* next_value_id is the first unissued id, so it bounds all issued ids.  */
2621 return next_value_id;
2624 /* Return the next unique value id. */
2627 get_next_value_id (void)
/* Post-increment: returns the current id and reserves it.  */
2629 return next_value_id++;
/* NOTE(review): listing gaps — early-out checks, the loop condition of the
   TREE_LIST walk, and the returns are not visible here.  */
2633 /* Compare two expressions E1 and E2 and return true if they are
2637 expressions_equal_p (tree e1, tree e2)
2644 te1 = TREE_TYPE (e1);
2645 te2 = TREE_TYPE (e2);
/* TREE_LISTs are compared element-wise along the chains.  */
2647 if (TREE_CODE (e1) == TREE_LIST && TREE_CODE (e2) == TREE_LIST)
2651 for (lop1 = e1, lop2 = e2;
2653 lop1 = TREE_CHAIN (lop1), lop2 = TREE_CHAIN (lop2))
2657 if (!expressions_equal_p (TREE_VALUE (lop1), TREE_VALUE (lop2)))
/* Otherwise require identical codes plus structural equality
   (OEP_PURE_SAME treats equivalent pure calls as equal).  */
2663 else if (TREE_CODE (e1) == TREE_CODE (e2)
2664 && operand_equal_p (e1, e2, OEP_PURE_SAME))
2670 /* Sort the VUSE array so that we can do equality comparisons
2671 quicker on two vuse vecs. */
2674 sort_vuses (VEC (tree,gc) *vuses)
/* A single-element vector is trivially sorted; skip the qsort call.  */
2676 if (VEC_length (tree, vuses) > 1)
2677 qsort (VEC_address (tree, vuses),
2678 VEC_length (tree, vuses),
2683 /* Sort the VUSE array so that we can do equality comparisons
2684 quicker on two vuse vecs. */
2687 sort_vuses_heap (VEC (tree,heap) *vuses)
2689 if (VEC_length (tree, vuses) > 1)
2690 qsort (VEC_address (tree, vuses),
2691 VEC_length (tree, vuses),