1 /* SCC value numbering for trees
2 Copyright (C) 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dan@dberlin.org>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "basic-block.h"
29 #include "diagnostic.h"
30 #include "tree-inline.h"
31 #include "tree-flow.h"
33 #include "tree-dump.h"
37 #include "tree-iterator.h"
39 #include "alloc-pool.h"
40 #include "tree-pass.h"
43 #include "langhooks.h"
46 #include "tree-ssa-propagate.h"
47 #include "tree-ssa-sccvn.h"
49 /* This algorithm is based on the SCC algorithm presented by Keith
50 Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
51 (http://citeseer.ist.psu.edu/41805.html). In
52 straight line code, it is equivalent to a regular hash based value
53 numbering that is performed in reverse postorder.
55 For code with cycles, there are two alternatives, both of which
56 require keeping the hashtables separate from the actual list of
57 value numbers for SSA names.
59 1. Iterate value numbering in an RPO walk of the blocks, removing
60 all the entries from the hashtable after each iteration (but
61 keeping the SSA name->value number mapping between iterations).
62 Iterate until it does not change.
64 2. Perform value numbering as part of an SCC walk on the SSA graph,
65 iterating only the cycles in the SSA graph until they do not change
66 (using a separate, optimistic hashtable for value numbering the SCC
69 The second is not just faster in practice (because most SSA graph
70 cycles do not involve all the variables in the graph), it also has
73 One of these nice properties is that when we pop an SCC off the
74 stack, we are guaranteed to have processed all the operands coming from
75 *outside of that SCC*, so we do not need to do anything special to
76 ensure they have value numbers.
78 Another nice property is that the SCC walk is done as part of a DFS
79 of the SSA graph, which makes it easy to perform combining and
80 simplifying operations at the same time.
82 The code below is deliberately written in a way that makes it easy
83 to separate the SCC walk from the other work it does.
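   As a purely illustrative example (not taken from any testcase),
   consider the SSA graph of a simple counted loop:

     i_1 = PHI <0 (preheader), i_2 (latch)>
     i_2 = i_1 + 1;

   i_1 and i_2 refer to each other, so they form an SCC.  The walk
   pops that SCC as a unit and iterates value numbering over just
   those two names in the optimistic table until their value numbers
   stop changing; names defined outside the loop were already
   numbered by the time the SCC is popped.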
85 In order to propagate constants through the code, we track which
86 expressions contain constants, and use those while folding. In
87 theory, we could also track expressions whose value numbers are
88 replaced, in case we end up folding based on expression
91 In order to value number memory, we assign value numbers to vuses.
92 This enables us to note that, for example, stores to the same
93 address of the same value from the same starting memory states are
97 1. We can iterate only the changing portions of the SCC's, but
98 I have not seen an SCC big enough for this to be a win.
99 2. If you differentiate between phi nodes for loops and phi nodes
100 for if-then-else, you can properly consider phi nodes in different
101 blocks for equivalence.
102 3. We could value number vuses in more cases, particularly, whole
106 /* The set of hashtables and alloc_pool's for their items. */
108 typedef struct vn_tables_s
113 struct obstack nary_obstack;
114 alloc_pool phis_pool;
115 alloc_pool references_pool;
118 static htab_t constant_to_value_id;
119 static bitmap constant_value_ids;
122 /* Valid hashtables storing information we have proven to be
125 static vn_tables_t valid_info;
127 /* Optimistic hashtables storing information we are making assumptions about
128 during iterations. */
130 static vn_tables_t optimistic_info;
132 /* Pointer to the set of hashtables that is currently being used.
133 Should always point to either the optimistic_info, or the
136 static vn_tables_t current_info;
139 /* Reverse post order index for each basic block. */
141 static int *rpo_numbers;
143 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
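/* For illustration only: after visiting a copy y_2 = x_1, SSA_VAL (y_2)
   is x_1; a name that has not been visited yet still has SSA_VAL equal
   to VN_TOP, and a varying name has SSA_VAL equal to itself.  */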
145 /* This represents the top of the VN lattice, which is the universal
150 /* Unique counter for our value ids. */
152 static unsigned int next_value_id;
154 /* Next DFS number and the stack for strongly connected component
157 static unsigned int next_dfs_num;
158 static VEC (tree, heap) *sccstack;
160 static bool may_insert;
163 DEF_VEC_P(vn_ssa_aux_t);
164 DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);
166 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
167 are allocated on an obstack for locality reasons, and to free them
168 without looping over the VEC. */
170 static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
171 static struct obstack vn_ssa_aux_obstack;
173 /* Return the value numbering information for a given SSA name. */
178 vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
179 SSA_NAME_VERSION (name));
184 /* Set the value numbering info for a given SSA name to a given
188 VN_INFO_SET (tree name, vn_ssa_aux_t value)
190 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
191 SSA_NAME_VERSION (name), value);
194 /* Initialize the value numbering info for a given SSA name.
195 This should be called just once for every SSA name. */
198 VN_INFO_GET (tree name)
200 vn_ssa_aux_t newinfo;
202 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
203 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
204 if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
205 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
206 SSA_NAME_VERSION (name) + 1);
207 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
208 SSA_NAME_VERSION (name), newinfo);
213 /* Get the representative expression for the SSA_NAME NAME. Returns
214 the representative SSA_NAME if there is no expression associated with it. */
217 vn_get_expr_for (tree name)
219 vn_ssa_aux_t vn = VN_INFO (name);
221 tree expr = NULL_TREE;
223 if (vn->valnum == VN_TOP)
226 /* If the value-number is a constant it is the representative
228 if (TREE_CODE (vn->valnum) != SSA_NAME)
231 /* Get to the information of the value of this SSA_NAME. */
232 vn = VN_INFO (vn->valnum);
234 /* If the value-number is a constant it is the representative
236 if (TREE_CODE (vn->valnum) != SSA_NAME)
239 /* Else if we have an expression, return it. */
240 if (vn->expr != NULL_TREE)
243 /* Otherwise use the defining statement to build the expression. */
244 def_stmt = SSA_NAME_DEF_STMT (vn->valnum);
246 /* If the value number is a default-definition or a PHI result
248 if (gimple_nop_p (def_stmt)
249 || gimple_code (def_stmt) == GIMPLE_PHI)
252 if (!is_gimple_assign (def_stmt))
255 /* FIXME tuples. This is incomplete and likely will miss some
257 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
260 if (gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
261 || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
262 || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
263 expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
264 gimple_expr_type (def_stmt),
265 TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
269 expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
270 gimple_expr_type (def_stmt),
271 gimple_assign_rhs1 (def_stmt));
275 expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
276 gimple_expr_type (def_stmt),
277 gimple_assign_rhs1 (def_stmt),
278 gimple_assign_rhs2 (def_stmt));
283 if (expr == NULL_TREE)
286 /* Cache the expression. */
293 /* Free a phi operation structure VP. */
298 vn_phi_t phi = (vn_phi_t) vp;
299 VEC_free (tree, heap, phi->phiargs);
302 /* Free a reference operation structure VP. */
305 free_reference (void *vp)
307 vn_reference_t vr = (vn_reference_t) vp;
308 VEC_free (vn_reference_op_s, heap, vr->operands);
311 /* Hash table equality function for vn_constant_t. */
314 vn_constant_eq (const void *p1, const void *p2)
316 const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
317 const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;
319 if (vc1->hashcode != vc2->hashcode)
322 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
325 /* Hash table hash function for vn_constant_t. */
328 vn_constant_hash (const void *p1)
330 const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
331 return vc1->hashcode;
334 /* Lookup a value id for CONSTANT and return it. If it does not
338 get_constant_value_id (tree constant)
341 struct vn_constant_s vc;
343 vc.hashcode = vn_hash_constant_with_type (constant);
344 vc.constant = constant;
345 slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
346 vc.hashcode, NO_INSERT);
348 return ((vn_constant_t)*slot)->value_id;
352 /* Lookup a value id for CONSTANT, and if it does not exist, create a
353 new one and return it. If it does exist, return it. */
356 get_or_alloc_constant_value_id (tree constant)
359 vn_constant_t vc = XNEW (struct vn_constant_s);
361 vc->hashcode = vn_hash_constant_with_type (constant);
362 vc->constant = constant;
363 slot = htab_find_slot_with_hash (constant_to_value_id, vc,
364 vc->hashcode, INSERT);
368 return ((vn_constant_t)*slot)->value_id;
370 vc->value_id = get_next_value_id ();
372 bitmap_set_bit (constant_value_ids, vc->value_id);
376 /* Return true if V is a value id for a constant. */
379 value_id_constant_p (unsigned int v)
381 return bitmap_bit_p (constant_value_ids, v);
384 /* Compare two reference operands P1 and P2 for equality. Return true if
385 they are equal, and false otherwise. */
388 vn_reference_op_eq (const void *p1, const void *p2)
390 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
391 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
393 return vro1->opcode == vro2->opcode
394 && types_compatible_p (vro1->type, vro2->type)
395 && expressions_equal_p (vro1->op0, vro2->op0)
396 && expressions_equal_p (vro1->op1, vro2->op1)
397 && expressions_equal_p (vro1->op2, vro2->op2);
400 /* Compute the hash for a reference operand VRO1. */
403 vn_reference_op_compute_hash (const vn_reference_op_t vro1)
405 hashval_t result = 0;
407 result += iterative_hash_expr (vro1->op0, vro1->opcode);
409 result += iterative_hash_expr (vro1->op1, vro1->opcode);
411 result += iterative_hash_expr (vro1->op2, vro1->opcode);
415 /* Return the hashcode for a given reference operation P1. */
418 vn_reference_hash (const void *p1)
420 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
421 return vr1->hashcode;
424 /* Compute a hash for the reference operation VR1 and return it. */
427 vn_reference_compute_hash (const vn_reference_t vr1)
429 hashval_t result = 0;
432 vn_reference_op_t vro;
434 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
435 result += iterative_hash_expr (v, 0);
436 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
437 result += vn_reference_op_compute_hash (vro);
442 /* Return true if reference operations P1 and P2 are equivalent. This
443 means they have the same set of operands and vuses. */
446 vn_reference_eq (const void *p1, const void *p2)
450 vn_reference_op_t vro;
452 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
453 const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
454 if (vr1->hashcode != vr2->hashcode)
457 if (vr1->vuses == vr2->vuses
458 && vr1->operands == vr2->operands)
461 /* Impossible for them to be equivalent if they have different
463 if (VEC_length (tree, vr1->vuses) != VEC_length (tree, vr2->vuses))
466 /* We require that address operands be canonicalized in a way that
467 two memory references will have the same operands if they are
469 if (VEC_length (vn_reference_op_s, vr1->operands)
470 != VEC_length (vn_reference_op_s, vr2->operands))
473 /* The memory state is more often different than the address of the
474 store/load, so check it first. */
475 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
477 if (VEC_index (tree, vr2->vuses, i) != v)
481 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
483 if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
490 /* Place the vuses from STMT into *result. */
493 vuses_to_vec (gimple stmt, VEC (tree, gc) **result)
501 VEC_reserve_exact (tree, gc, *result,
502 num_ssa_operands (stmt, SSA_OP_VIRTUAL_USES));
504 FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VIRTUAL_USES)
505 VEC_quick_push (tree, *result, vuse);
509 /* Copy the VUSE names in STMT into a vector, and return
512 static VEC (tree, gc) *
513 copy_vuses_from_stmt (gimple stmt)
515 VEC (tree, gc) *vuses = NULL;
517 vuses_to_vec (stmt, &vuses);
522 /* Place the vdefs from STMT into *result. */
525 vdefs_to_vec (gimple stmt, VEC (tree, gc) **result)
533 *result = VEC_alloc (tree, gc, num_ssa_operands (stmt, SSA_OP_VIRTUAL_DEFS));
535 FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, iter, SSA_OP_VIRTUAL_DEFS)
536 VEC_quick_push (tree, *result, vdef);
539 /* Copy the names of vdef results in STMT into a vector, and return
542 static VEC (tree, gc) *
543 copy_vdefs_from_stmt (gimple stmt)
545 VEC (tree, gc) *vdefs = NULL;
547 vdefs_to_vec (stmt, &vdefs);
552 /* Place for shared_v{uses/defs}_from_stmt to shove vuses/vdefs. */
553 static VEC (tree, gc) *shared_lookup_vops;
555 /* Copy the virtual uses from STMT into SHARED_LOOKUP_VOPS.
556 This function will overwrite the current SHARED_LOOKUP_VOPS
560 shared_vuses_from_stmt (gimple stmt)
562 VEC_truncate (tree, shared_lookup_vops, 0);
563 vuses_to_vec (stmt, &shared_lookup_vops);
565 return shared_lookup_vops;
568 /* Copy the operations present in load/store REF into RESULT, a vector of
569 vn_reference_op_s's. */
572 copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
574 if (TREE_CODE (ref) == TARGET_MEM_REF)
576 vn_reference_op_s temp;
578 memset (&temp, 0, sizeof (temp));
579 /* We do not care for spurious type qualifications. */
580 temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
581 temp.opcode = TREE_CODE (ref);
582 temp.op0 = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
583 temp.op1 = TMR_INDEX (ref);
584 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
586 memset (&temp, 0, sizeof (temp));
587 temp.type = NULL_TREE;
588 temp.opcode = TREE_CODE (ref);
589 temp.op0 = TMR_STEP (ref);
590 temp.op1 = TMR_OFFSET (ref);
591 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
595 /* For non-calls, store the information that makes up the address. */
599 vn_reference_op_s temp;
601 memset (&temp, 0, sizeof (temp));
602 /* We do not care for spurious type qualifications. */
603 temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
604 temp.opcode = TREE_CODE (ref);
608 case ALIGN_INDIRECT_REF:
610 /* The only operand is the address, which gets its own
611 vn_reference_op_s structure. */
613 case MISALIGNED_INDIRECT_REF:
614 temp.op0 = TREE_OPERAND (ref, 1);
617 /* Record bits and position. */
618 temp.op0 = TREE_OPERAND (ref, 1);
619 temp.op1 = TREE_OPERAND (ref, 2);
622 /* The field decl is enough to unambiguously specify the field,
623 a matching type is not necessary and a mismatching type
624 is always a spurious difference. */
625 temp.type = NULL_TREE;
626 /* If this is a reference to a union member, record the union
627 member size as operand. Do so only if we are doing
628 expression insertion (during FRE), as PRE currently gets
629 confused with this. */
631 && TREE_OPERAND (ref, 2) == NULL_TREE
632 && TREE_CODE (DECL_CONTEXT (TREE_OPERAND (ref, 1))) == UNION_TYPE
633 && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (ref, 1)))
634 && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1))))
635 temp.op0 = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)));
638 /* Record field as operand. */
639 temp.op0 = TREE_OPERAND (ref, 1);
640 temp.op1 = TREE_OPERAND (ref, 2);
643 case ARRAY_RANGE_REF:
645 /* Record index as operand. */
646 temp.op0 = TREE_OPERAND (ref, 1);
647 temp.op1 = TREE_OPERAND (ref, 2);
648 temp.op2 = TREE_OPERAND (ref, 3);
664 if (is_gimple_min_invariant (ref))
670 /* These are only interesting for their operands, their
671 existence, and their type. They will never be the last
672 ref in the chain of references (IE they require an
673 operand), so we don't have to put anything
674 for op* as it will be handled by the iteration */
677 case VIEW_CONVERT_EXPR:
682 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
684 if (REFERENCE_CLASS_P (ref)
685 || (TREE_CODE (ref) == ADDR_EXPR
686 && !is_gimple_min_invariant (ref)))
687 ref = TREE_OPERAND (ref, 0);
693 /* Re-create a reference tree from the reference ops OPS.
694 Returns NULL_TREE if the ops were not handled.
695 This routine needs to be kept in sync with copy_reference_ops_from_ref. */
698 get_ref_from_reference_ops (VEC(vn_reference_op_s, heap) *ops)
700 vn_reference_op_t op;
702 tree ref, *op0_p = &ref;
704 for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
711 case ALIGN_INDIRECT_REF:
713 *op0_p = build1 (op->opcode, op->type, NULL_TREE);
714 op0_p = &TREE_OPERAND (*op0_p, 0);
717 case MISALIGNED_INDIRECT_REF:
718 *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type,
720 op0_p = &TREE_OPERAND (*op0_p, 0);
724 *op0_p = build3 (BIT_FIELD_REF, op->type, NULL_TREE,
726 op0_p = &TREE_OPERAND (*op0_p, 0);
730 *op0_p = build3 (COMPONENT_REF, TREE_TYPE (op->op0), NULL_TREE,
732 op0_p = &TREE_OPERAND (*op0_p, 0);
735 case ARRAY_RANGE_REF:
737 *op0_p = build4 (op->opcode, op->type, NULL_TREE,
738 op->op0, op->op1, op->op2);
739 op0_p = &TREE_OPERAND (*op0_p, 0);
757 if (op->op0 != NULL_TREE)
759 gcc_assert (is_gimple_min_invariant (op->op0));
766 case VIEW_CONVERT_EXPR:
767 *op0_p = build1 (op->opcode, op->type, NULL_TREE);
768 op0_p = &TREE_OPERAND (*op0_p, 0);
779 /* Copy the operations present in the call statement CALL into RESULT, a vector of
780 vn_reference_op_s's. */
783 copy_reference_ops_from_call (gimple call,
784 VEC(vn_reference_op_s, heap) **result)
786 vn_reference_op_s temp;
789 /* Copy the type, opcode, function being called and static chain. */
790 memset (&temp, 0, sizeof (temp));
791 temp.type = gimple_call_return_type (call);
792 temp.opcode = CALL_EXPR;
793 temp.op0 = gimple_call_fn (call);
794 temp.op1 = gimple_call_chain (call);
795 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
797 /* Copy the call arguments. As they can be references as well,
798 just chain them together. */
799 for (i = 0; i < gimple_call_num_args (call); ++i)
801 tree callarg = gimple_call_arg (call, i);
802 copy_reference_ops_from_ref (callarg, result);
806 /* Create a vector of vn_reference_op_s structures from REF, a
807 REFERENCE_CLASS_P tree. The vector is not shared. */
809 static VEC(vn_reference_op_s, heap) *
810 create_reference_ops_from_ref (tree ref)
812 VEC (vn_reference_op_s, heap) *result = NULL;
814 copy_reference_ops_from_ref (ref, &result);
818 /* Create a vector of vn_reference_op_s structures from CALL, a
819 call statement. The vector is not shared. */
821 static VEC(vn_reference_op_s, heap) *
822 create_reference_ops_from_call (gimple call)
824 VEC (vn_reference_op_s, heap) *result = NULL;
826 copy_reference_ops_from_call (call, &result);
830 static VEC(vn_reference_op_s, heap) *shared_lookup_references;
832 /* Create a vector of vn_reference_op_s structures from REF, a
833 REFERENCE_CLASS_P tree. The vector is shared among all callers of
836 static VEC(vn_reference_op_s, heap) *
837 shared_reference_ops_from_ref (tree ref)
841 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
842 copy_reference_ops_from_ref (ref, &shared_lookup_references);
843 return shared_lookup_references;
846 /* Create a vector of vn_reference_op_s structures from CALL, a
847 call statement. The vector is shared among all callers of
850 static VEC(vn_reference_op_s, heap) *
851 shared_reference_ops_from_call (gimple call)
855 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
856 copy_reference_ops_from_call (call, &shared_lookup_references);
857 return shared_lookup_references;
861 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
862 structures into their value numbers. This is done in-place, and
863 the vector passed in is returned. */
865 static VEC (vn_reference_op_s, heap) *
866 valueize_refs (VEC (vn_reference_op_s, heap) *orig)
868 vn_reference_op_t vro;
871 for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
873 if (vro->opcode == SSA_NAME
874 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
876 vro->op0 = SSA_VAL (vro->op0);
877 /* If it transforms from an SSA_NAME to a constant, update
879 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
880 vro->opcode = TREE_CODE (vro->op0);
882 /* TODO: Do we want to valueize op2 and op1 of
883 ARRAY_REF/COMPONENT_REF for Ada?  */
890 /* Transform any SSA_NAME's in ORIG, a vector of vuse trees, into
891 their value numbers. This is done in-place, and the vector passed
894 static VEC (tree, gc) *
895 valueize_vuses (VEC (tree, gc) *orig)
897 bool made_replacement = false;
901 for (i = 0; VEC_iterate (tree, orig, i, vuse); i++)
903 if (vuse != SSA_VAL (vuse))
905 made_replacement = true;
906 VEC_replace (tree, orig, i, SSA_VAL (vuse));
910 if (made_replacement && VEC_length (tree, orig) > 1)
916 /* Return the single reference statement defining all virtual uses
917 in VUSES or NULL_TREE, if there are multiple defining statements.
918 Take into account only definitions that alias REF if following
922 get_def_ref_stmt_vuses (tree ref, VEC (tree, gc) *vuses)
928 gcc_assert (VEC_length (tree, vuses) >= 1);
930 def_stmt = SSA_NAME_DEF_STMT (VEC_index (tree, vuses, 0));
931 if (gimple_code (def_stmt) == GIMPLE_PHI)
933 /* We can only handle lookups over PHI nodes for a single
935 if (VEC_length (tree, vuses) == 1)
937 def_stmt = get_single_def_stmt_from_phi (ref, def_stmt);
944 /* Verify each VUSE reaches the same defining stmt. */
945 for (i = 1; VEC_iterate (tree, vuses, i, vuse); ++i)
947 gimple tmp = SSA_NAME_DEF_STMT (vuse);
952 /* Now see if the definition aliases ref, and loop until it does. */
955 && is_gimple_assign (def_stmt)
956 && !refs_may_alias_p (ref, gimple_get_lhs (def_stmt)))
957 def_stmt = get_single_def_stmt_with_phi (ref, def_stmt);
962 /* Lookup a SCCVN reference operation VR in the current hash table.
963 Returns the resulting value number if it exists in the hash table,
964 NULL_TREE otherwise. VNRESULT will be filled in with the actual
965 vn_reference_t stored in the hashtable if something is found. */
968 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
974 slot = htab_find_slot_with_hash (current_info->references, vr,
976 if (!slot && current_info == optimistic_info)
977 slot = htab_find_slot_with_hash (valid_info->references, vr,
982 *vnresult = (vn_reference_t)*slot;
983 return ((vn_reference_t)*slot)->result;
990 /* Lookup a reference operation by its parts in the current hash table.
991 Returns the resulting value number if it exists in the hash table,
992 NULL_TREE otherwise. VNRESULT will be filled in with the actual
993 vn_reference_t stored in the hashtable if something is found. */
996 vn_reference_lookup_pieces (VEC (tree, gc) *vuses,
997 VEC (vn_reference_op_s, heap) *operands,
998 vn_reference_t *vnresult, bool maywalk)
1000 struct vn_reference_s vr1;
1005 vr1.vuses = valueize_vuses (vuses);
1006 vr1.operands = valueize_refs (operands);
1007 vr1.hashcode = vn_reference_compute_hash (&vr1);
1008 result = vn_reference_lookup_1 (&vr1, vnresult);
1010 /* If there is a single defining statement for all virtual uses, we can
1011 use that, following virtual use-def chains. */
1015 && VEC_length (tree, vr1.vuses) >= 1)
1017 tree ref = get_ref_from_reference_ops (operands);
1020 && (def_stmt = get_def_ref_stmt_vuses (ref, vr1.vuses))
1021 && is_gimple_assign (def_stmt))
1023 /* We are now at an aliasing definition for the vuses we want to
1024 look up. Re-do the lookup with the vdefs for this stmt. */
1025 vdefs_to_vec (def_stmt, &vuses);
1026 vr1.vuses = valueize_vuses (vuses);
1027 vr1.hashcode = vn_reference_compute_hash (&vr1);
1028 result = vn_reference_lookup_1 (&vr1, vnresult);
1035 /* Lookup OP in the current hash table, and return the resulting value
1036 number if it exists in the hash table. Return NULL_TREE if it does
1037 not exist in the hash table or if the result field of the structure
1038 was NULL.  VNRESULT will be filled in with the vn_reference_t
1039 stored in the hashtable if one exists. */
1042 vn_reference_lookup (tree op, VEC (tree, gc) *vuses, bool maywalk,
1043 vn_reference_t *vnresult)
1045 struct vn_reference_s vr1;
1051 vr1.vuses = valueize_vuses (vuses);
1052 vr1.operands = valueize_refs (shared_reference_ops_from_ref (op));
1053 vr1.hashcode = vn_reference_compute_hash (&vr1);
1054 result = vn_reference_lookup_1 (&vr1, vnresult);
1056 /* If there is a single defining statement for all virtual uses, we can
1057 use that, following virtual use-def chains. */
1061 && VEC_length (tree, vr1.vuses) >= 1
1062 && (def_stmt = get_def_ref_stmt_vuses (op, vr1.vuses))
1063 && is_gimple_assign (def_stmt))
1065 /* We are now at an aliasing definition for the vuses we want to
1066 look up. Re-do the lookup with the vdefs for this stmt. */
1067 vdefs_to_vec (def_stmt, &vuses);
1068 vr1.vuses = valueize_vuses (vuses);
1069 vr1.hashcode = vn_reference_compute_hash (&vr1);
1070 result = vn_reference_lookup_1 (&vr1, vnresult);
1077 /* Insert OP into the current hash table with a value number of
1078 RESULT, and return the resulting reference structure we created. */
1081 vn_reference_insert (tree op, tree result, VEC (tree, gc) *vuses)
1086 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1087 if (TREE_CODE (result) == SSA_NAME)
1088 vr1->value_id = VN_INFO (result)->value_id;
1090 vr1->value_id = get_or_alloc_constant_value_id (result);
1091 vr1->vuses = valueize_vuses (vuses);
1092 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
1093 vr1->hashcode = vn_reference_compute_hash (vr1);
1094 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
1096 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1099 /* Because we lookup stores using vuses, and value number failures
1100 using the vdefs (see visit_reference_op_store for how and why),
1101 it's possible that on failure we may try to insert an already
1102 inserted store. This is not wrong, there is no ssa name for a
1103 store that we could use as a differentiator anyway. Thus, unlike
1104 the other lookup functions, you cannot gcc_assert (!*slot)
1107 /* But free the old slot in case of a collision. */
1109 free_reference (*slot);
1115 /* Insert a reference by its pieces into the current hash table with
1116 a value number of RESULT. Return the resulting reference
1117 structure we created. */
1120 vn_reference_insert_pieces (VEC (tree, gc) *vuses,
1121 VEC (vn_reference_op_s, heap) *operands,
1122 tree result, unsigned int value_id)
1128 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1129 vr1->value_id = value_id;
1130 vr1->vuses = valueize_vuses (vuses);
1131 vr1->operands = valueize_refs (operands);
1132 vr1->hashcode = vn_reference_compute_hash (vr1);
1133 if (result && TREE_CODE (result) == SSA_NAME)
1134 result = SSA_VAL (result);
1135 vr1->result = result;
1137 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1140 /* At this point we should have all the things inserted that we have
1141 seen before, and we should never try inserting something that
1143 gcc_assert (!*slot);
1145 free_reference (*slot);
1151 /* Compute and return the hash value for nary operation VNO1. */
1154 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
1159 for (i = 0; i < vno1->length; ++i)
1160 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
1161 vno1->op[i] = SSA_VAL (vno1->op[i]);
1163 if (vno1->length == 2
1164 && commutative_tree_code (vno1->opcode)
1165 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
1167 tree temp = vno1->op[0];
1168 vno1->op[0] = vno1->op[1];
1172 for (i = 0; i < vno1->length; ++i)
1173 hash += iterative_hash_expr (vno1->op[i], vno1->opcode);
1178 /* Return the computed hashcode for nary operation P1. */
1181 vn_nary_op_hash (const void *p1)
1183 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
1184 return vno1->hashcode;
1187 /* Compare nary operations P1 and P2 and return true if they are
1191 vn_nary_op_eq (const void *p1, const void *p2)
1193 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
1194 const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
1197 if (vno1->hashcode != vno2->hashcode)
1200 if (vno1->opcode != vno2->opcode
1201 || !types_compatible_p (vno1->type, vno2->type))
1204 for (i = 0; i < vno1->length; ++i)
1205 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
1211 /* Lookup an n-ary operation by its pieces and return the resulting value
1212 number if it exists in the hash table. Return NULL_TREE if it does
1213 not exist in the hash table or if the result field of the operation
1214 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
1218 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
1219 tree type, tree op0, tree op1, tree op2,
1220 tree op3, vn_nary_op_t *vnresult)
1223 struct vn_nary_op_s vno1;
1227 vno1.length = length;
1233 vno1.hashcode = vn_nary_op_compute_hash (&vno1);
1234 slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
1236 if (!slot && current_info == optimistic_info)
1237 slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
1242 *vnresult = (vn_nary_op_t)*slot;
1243 return ((vn_nary_op_t)*slot)->result;
1246 /* Lookup OP in the current hash table, and return the resulting value
1247 number if it exists in the hash table. Return NULL_TREE if it does
1248 not exist in the hash table or if the result field of the operation
1249 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
1253 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
1256 struct vn_nary_op_s vno1;
1261 vno1.opcode = TREE_CODE (op);
1262 vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
1263 vno1.type = TREE_TYPE (op);
1264 for (i = 0; i < vno1.length; ++i)
1265 vno1.op[i] = TREE_OPERAND (op, i);
1266 vno1.hashcode = vn_nary_op_compute_hash (&vno1);
1267 slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
1269 if (!slot && current_info == optimistic_info)
1270 slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
1275 *vnresult = (vn_nary_op_t)*slot;
1276 return ((vn_nary_op_t)*slot)->result;
1279 /* Lookup the rhs of STMT in the current hash table, and return the resulting
1280 value number if it exists in the hash table. Return NULL_TREE if
1281 it does not exist in the hash table. VNRESULT will contain the
1282 vn_nary_op_t from the hashtable if it exists. */
1285 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
1288 struct vn_nary_op_s vno1;
1293 vno1.opcode = gimple_assign_rhs_code (stmt);
1294 vno1.length = gimple_num_ops (stmt) - 1;
1295 vno1.type = TREE_TYPE (gimple_assign_lhs (stmt));
1296 for (i = 0; i < vno1.length; ++i)
1297 vno1.op[i] = gimple_op (stmt, i + 1);
1298 if (vno1.opcode == REALPART_EXPR
1299 || vno1.opcode == IMAGPART_EXPR
1300 || vno1.opcode == VIEW_CONVERT_EXPR)
1301 vno1.op[0] = TREE_OPERAND (vno1.op[0], 0);
1302 vno1.hashcode = vn_nary_op_compute_hash (&vno1);
1303 slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
1305 if (!slot && current_info == optimistic_info)
1306 slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
1311 *vnresult = (vn_nary_op_t)*slot;
1312 return ((vn_nary_op_t)*slot)->result;
1315 /* Insert an n-ary operation into the current hash table using its
1316 pieces. Return the vn_nary_op_t structure we created and put in
1320 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
1321 tree type, tree op0,
1322 tree op1, tree op2, tree op3,
1324 unsigned int value_id)
1329 vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
1330 (sizeof (struct vn_nary_op_s)
1331 - sizeof (tree) * (4 - length)));
1332 vno1->value_id = value_id;
1333 vno1->opcode = code;
1334 vno1->length = length;
1344 vno1->result = result;
1345 vno1->hashcode = vn_nary_op_compute_hash (vno1);
1346 slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
1348 gcc_assert (!*slot);
1355 /* Insert OP into the current hash table with a value number of
1356 RESULT. Return the vn_nary_op_t structure we created and put in
1360 vn_nary_op_insert (tree op, tree result)
1362 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
1367 vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
1368 (sizeof (struct vn_nary_op_s)
1369 - sizeof (tree) * (4 - length)));
1370 vno1->value_id = VN_INFO (result)->value_id;
1371 vno1->opcode = TREE_CODE (op);
1372 vno1->length = length;
1373 vno1->type = TREE_TYPE (op);
1374 for (i = 0; i < vno1->length; ++i)
1375 vno1->op[i] = TREE_OPERAND (op, i);
1376 vno1->result = result;
1377 vno1->hashcode = vn_nary_op_compute_hash (vno1);
1378 slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
1380 gcc_assert (!*slot);
1386 /* Insert the rhs of STMT into the current hash table with a value number of
1390 vn_nary_op_insert_stmt (gimple stmt, tree result)
1392 unsigned length = gimple_num_ops (stmt) - 1;
1397 vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
1398 (sizeof (struct vn_nary_op_s)
1399 - sizeof (tree) * (4 - length)));
1400 vno1->value_id = VN_INFO (result)->value_id;
1401 vno1->opcode = gimple_assign_rhs_code (stmt);
1402 vno1->length = length;
1403 vno1->type = TREE_TYPE (gimple_assign_lhs (stmt));
1404 for (i = 0; i < vno1->length; ++i)
1405 vno1->op[i] = gimple_op (stmt, i + 1);
1406 if (vno1->opcode == REALPART_EXPR
1407 || vno1->opcode == IMAGPART_EXPR
1408 || vno1->opcode == VIEW_CONVERT_EXPR)
1409 vno1->op[0] = TREE_OPERAND (vno1->op[0], 0);
1410 vno1->result = result;
1411 vno1->hashcode = vn_nary_op_compute_hash (vno1);
1412 slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
1414 gcc_assert (!*slot);
1420 /* Compute a hashcode for PHI operation VP1 and return it. */
1422 static inline hashval_t
1423 vn_phi_compute_hash (vn_phi_t vp1)
1425 hashval_t result = 0;
1430 result = vp1->block->index;
1432 /* If all PHI arguments are constants we need to distinguish
1433 the PHI node via its type. */
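  /* For example (a hedged illustration), all-constant PHIs <1, 2> of
     type int and of type unsigned int must not be unified, so the
     type's precision and signedness are folded into the hash.  */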
1434 type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
1435 result += (INTEGRAL_TYPE_P (type)
1436 + (INTEGRAL_TYPE_P (type)
1437 ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));
1439 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
1441 if (phi1op == VN_TOP)
1443 result += iterative_hash_expr (phi1op, result);
1449 /* Return the computed hashcode for phi operation P1. */
1452 vn_phi_hash (const void *p1)
1454 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
1455 return vp1->hashcode;
1458 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
1461 vn_phi_eq (const void *p1, const void *p2)
1463 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
1464 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
1466 if (vp1->hashcode != vp2->hashcode)
1469 if (vp1->block == vp2->block)
1474 /* If the PHI nodes do not have compatible types
1475 they are not the same. */
1476 if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
1477 TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
1480 /* Any phi in the same block will have its arguments in the
1481 same edge order, because of how we store phi nodes. */
1482 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
1484 tree phi2op = VEC_index (tree, vp2->phiargs, i);
1485 if (phi1op == VN_TOP || phi2op == VN_TOP)
1487 if (!expressions_equal_p (phi1op, phi2op))
1495 static VEC(tree, heap) *shared_lookup_phiargs;
1497 /* Lookup PHI in the current hash table, and return the resulting
1498 value number if it exists in the hash table. Return NULL_TREE if
1499 it does not exist in the hash table. */
1502 vn_phi_lookup (gimple phi)
1505 struct vn_phi_s vp1;
1508 VEC_truncate (tree, shared_lookup_phiargs, 0);
1510 /* Canonicalize the SSA_NAME's to their value number. */
1511 for (i = 0; i < gimple_phi_num_args (phi); i++)
1513 tree def = PHI_ARG_DEF (phi, i);
1514 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
1515 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
1517 vp1.phiargs = shared_lookup_phiargs;
1518 vp1.block = gimple_bb (phi);
1519 vp1.hashcode = vn_phi_compute_hash (&vp1);
1520 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
1522 if (!slot && current_info == optimistic_info)
1523 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
1527 return ((vn_phi_t)*slot)->result;
1530 /* Insert PHI into the current hash table with a value number of
1534 vn_phi_insert (gimple phi, tree result)
1537 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
1539 VEC (tree, heap) *args = NULL;
1541 /* Canonicalize the SSA_NAME's to their value number. */
1542 for (i = 0; i < gimple_phi_num_args (phi); i++)
1544 tree def = PHI_ARG_DEF (phi, i);
1545 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
1546 VEC_safe_push (tree, heap, args, def);
1548 vp1->value_id = VN_INFO (result)->value_id;
1549 vp1->phiargs = args;
1550 vp1->block = gimple_bb (phi);
1551 vp1->result = result;
1552 vp1->hashcode = vn_phi_compute_hash (vp1);
1554 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
1557 /* Because we iterate over phi operations more than once, it's
1558 possible the slot might already exist here, hence no assert.  */
1564 /* Print set of components in strongly connected component SCC to OUT. */
1567 print_scc (FILE *out, VEC (tree, heap) *scc)
1572 fprintf (out, "SCC consists of: ");
1573 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1575 print_generic_expr (out, var, 0);
1578 fprintf (out, "\n");
1581 /* Set the value number of FROM to TO, return true if it has changed
1585 set_ssa_val_to (tree from, tree to)
1590 && TREE_CODE (to) == SSA_NAME
1591 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
1594 /* The only thing we allow as value numbers are VN_TOP, ssa_names
1595 and invariants. So assert that here. */
1596 gcc_assert (to != NULL_TREE
1598 || TREE_CODE (to) == SSA_NAME
1599 || is_gimple_min_invariant (to)));
1601 if (dump_file && (dump_flags & TDF_DETAILS))
1603 fprintf (dump_file, "Setting value number of ");
1604 print_generic_expr (dump_file, from, 0);
1605 fprintf (dump_file, " to ");
1606 print_generic_expr (dump_file, to, 0);
1609 currval = SSA_VAL (from);
1611 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
1613 SSA_VAL (from) = to;
1614 if (dump_file && (dump_flags & TDF_DETAILS))
1615 fprintf (dump_file, " (changed)\n");
1618 if (dump_file && (dump_flags & TDF_DETAILS))
1619 fprintf (dump_file, "\n");
1623 /* Set all definitions in STMT to value number to themselves.
1624 Return true if a value number changed. */
1627 defs_to_varying (gimple stmt)
1629 bool changed = false;
1633 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
1635 tree def = DEF_FROM_PTR (defp);
1637 VN_INFO (def)->use_processed = true;
1638 changed |= set_ssa_val_to (def, def);
1643 static bool expr_has_constants (tree expr);
1644 static tree valueize_expr (tree expr);
1646 /* Visit a copy between LHS and RHS, return true if the value number
1650 visit_copy (tree lhs, tree rhs)
1652 /* Follow chains of copies to their destination. */
1653 while (TREE_CODE (rhs) == SSA_NAME
1654 && SSA_VAL (rhs) != rhs)
1655 rhs = SSA_VAL (rhs);
1657 /* The copy may have a more interesting constant filled expression
1658 (we don't, since we know our RHS is just an SSA name). */
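  /* Illustrative case: for lhs_3 = rhs_2 where rhs_2 carries the
     expression a_1 + 1, lhs_3 inherits that expression and its
     has_constants flag below.  */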
1659 if (TREE_CODE (rhs) == SSA_NAME)
1661 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
1662 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
1665 return set_ssa_val_to (lhs, rhs);
1668 /* Visit a unary operator RHS, value number it, and return true if the
1669 value number of LHS has changed as a result. */
1672 visit_unary_op (tree lhs, gimple stmt)
1674 bool changed = false;
1675 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
1679 changed = set_ssa_val_to (lhs, result);
1683 changed = set_ssa_val_to (lhs, lhs);
1684 vn_nary_op_insert_stmt (stmt, lhs);
1690 /* Visit a binary operator RHS, value number it, and return true if the
1691 value number of LHS has changed as a result. */
1694 visit_binary_op (tree lhs, gimple stmt)
1696 bool changed = false;
1697 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
1701 changed = set_ssa_val_to (lhs, result);
1705 changed = set_ssa_val_to (lhs, lhs);
1706 vn_nary_op_insert_stmt (stmt, lhs);
1712 /* Visit a call STMT storing into LHS. Return true if the value number
1713 of the LHS has changed as a result. */
1716 visit_reference_op_call (tree lhs, gimple stmt)
1718 bool changed = false;
1719 struct vn_reference_s vr1;
1722 vr1.vuses = valueize_vuses (shared_vuses_from_stmt (stmt));
1723 vr1.operands = valueize_refs (shared_reference_ops_from_call (stmt));
1724 vr1.hashcode = vn_reference_compute_hash (&vr1);
1725 result = vn_reference_lookup_1 (&vr1, NULL);
1728 changed = set_ssa_val_to (lhs, result);
1729 if (TREE_CODE (result) == SSA_NAME
1730 && VN_INFO (result)->has_constants)
1731 VN_INFO (lhs)->has_constants = true;
1737 changed = set_ssa_val_to (lhs, lhs);
1738 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
1739 vr2->vuses = valueize_vuses (copy_vuses_from_stmt (stmt));
1740 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
1741 vr2->hashcode = vr1.hashcode;
1743 slot = htab_find_slot_with_hash (current_info->references,
1744 vr2, vr2->hashcode, INSERT);
1746 free_reference (*slot);
1753 /* Visit a load from a reference operator RHS, part of STMT, value number it,
1754 and return true if the value number of the LHS has changed as a result. */
1757 visit_reference_op_load (tree lhs, tree op, gimple stmt)
1759 bool changed = false;
1760 tree result = vn_reference_lookup (op, shared_vuses_from_stmt (stmt), true,
1763 /* We handle type-punning through unions by value-numbering based
1764 on offset and size of the access. Be prepared to handle a
1765 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
1767 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
1769 /* We will be setting the value number of lhs to the value number
1770 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
1771 So first simplify and lookup this expression to see if it
1772 is already available. */
1773 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
1774 if ((CONVERT_EXPR_P (val)
1775 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
1776 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
1778 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
1779 if ((CONVERT_EXPR_P (tem)
1780 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
1781 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
1782 TREE_TYPE (val), tem)))
1786 if (!is_gimple_min_invariant (val)
1787 && TREE_CODE (val) != SSA_NAME)
1788 result = vn_nary_op_lookup (val, NULL);
1789 /* If the expression is not yet available, value-number lhs to
1790 a new SSA_NAME we create. */
1791 if (!result && may_insert)
1793 result = make_ssa_name (SSA_NAME_VAR (lhs), NULL);
1794 /* Initialize value-number information properly. */
1795 VN_INFO_GET (result)->valnum = result;
1796 VN_INFO (result)->value_id = get_next_value_id ();
1797 VN_INFO (result)->expr = val;
1798 VN_INFO (result)->has_constants = expr_has_constants (val);
1799 VN_INFO (result)->needs_insertion = true;
1800 /* As all "inserted" statements are singleton SCCs, insert
1801 to the valid table. This is strictly needed to
1802 avoid re-generating new value SSA_NAMEs for the same
1803 expression during SCC iteration over and over (the
1804 optimistic table gets cleared after each iteration).
1805 We do not need to insert into the optimistic table, as
1806 lookups there will fall back to the valid table. */
1807 if (current_info == optimistic_info)
1809 current_info = valid_info;
1810 vn_nary_op_insert (val, result);
1811 current_info = optimistic_info;
1814 vn_nary_op_insert (val, result);
1815 if (dump_file && (dump_flags & TDF_DETAILS))
1817 fprintf (dump_file, "Inserting name ");
1818 print_generic_expr (dump_file, result, 0);
1819 fprintf (dump_file, " for expression ");
1820 print_generic_expr (dump_file, val, 0);
1821 fprintf (dump_file, "\n");
1828 changed = set_ssa_val_to (lhs, result);
1829 if (TREE_CODE (result) == SSA_NAME
1830 && VN_INFO (result)->has_constants)
1832 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
1833 VN_INFO (lhs)->has_constants = true;
1838 changed = set_ssa_val_to (lhs, lhs);
1839 vn_reference_insert (op, lhs, copy_vuses_from_stmt (stmt));
1846 /* Visit a store to a reference operator LHS, part of STMT, value number it,
1847 and return true if the value number of the LHS has changed as a result. */
1850 visit_reference_op_store (tree lhs, tree op, gimple stmt)
1852 bool changed = false;
1854 bool resultsame = false;
1856 /* First we want to lookup using the *vuses* from the store and see
1857 whether the last store to this location with the same address
1860 The vuses represent the memory state before the store. If the
1861 memory state, address, and value of the store is the same as the
1862 last store to this location, then this store will produce the
1863 same memory state as that store.
1865 In this case the vdef versions for this store are value numbered to those
1866 vuse versions, since they represent the same memory state after
1869 Otherwise, the vdefs for the store are used when inserting into
1870 the table, since the store generates a new memory state. */
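  /* A purely illustrative sketch of the above: for two identical stores

       # VDEF <m_5>  (with vuse m_3)
       a = x_1;
       ...
       # VDEF <m_7>  (with vuse m_5)
       a = x_1;

     the first store is entered into the table under its vdef m_5, so the
     second store's lookup with its vuse m_5 finds a match with the same
     stored value, and its vdef m_7 is value numbered to m_5 instead of
     recording a new memory state.  */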
1872 result = vn_reference_lookup (lhs, shared_vuses_from_stmt (stmt), false,
1877 if (TREE_CODE (result) == SSA_NAME)
1878 result = SSA_VAL (result);
1879 if (TREE_CODE (op) == SSA_NAME)
1881 resultsame = expressions_equal_p (result, op);
1884 if (!result || !resultsame)
1886 VEC(tree, gc) *vdefs = copy_vdefs_from_stmt (stmt);
1890 if (dump_file && (dump_flags & TDF_DETAILS))
1892 fprintf (dump_file, "No store match\n");
1893 fprintf (dump_file, "Value numbering store ");
1894 print_generic_expr (dump_file, lhs, 0);
1895 fprintf (dump_file, " to ");
1896 print_generic_expr (dump_file, op, 0);
1897 fprintf (dump_file, "\n");
1899 /* Have to set value numbers before insert, since insert is
1900 going to valueize the references in-place. */
1901 for (i = 0; VEC_iterate (tree, vdefs, i, vdef); i++)
1903 VN_INFO (vdef)->use_processed = true;
1904 changed |= set_ssa_val_to (vdef, vdef);
1907 /* Do not insert structure copies into the tables. */
1908 if (is_gimple_min_invariant (op)
1909 || is_gimple_reg (op))
1910 vn_reference_insert (lhs, op, vdefs);
1914 /* We had a match, so value number the vdefs to have the value
1915 number of the vuses they came from. */
1916 ssa_op_iter op_iter;
1920 if (dump_file && (dump_flags & TDF_DETAILS))
1921 fprintf (dump_file, "Store matched earlier value, "
1922 "value numbering store vdefs to matching vuses.\n");
1924 FOR_EACH_SSA_VDEF_OPERAND (var, vv, stmt, op_iter)
1926 tree def = DEF_FROM_PTR (var);
1929 /* Uh, if the vuse is a multiuse, we can't really do much
1930 here, sadly, since we don't know which value number of
1931 which vuse to use. */
1932 if (VUSE_VECT_NUM_ELEM (*vv) != 1)
1935 use = VUSE_ELEMENT_VAR (*vv, 0);
1937 VN_INFO (def)->use_processed = true;
1938 changed |= set_ssa_val_to (def, SSA_VAL (use));
1945 /* Visit and value number PHI, return true if the value number
1949 visit_phi (gimple phi)
1951 bool changed = false;
1953 tree sameval = VN_TOP;
1954 bool allsame = true;
1957 /* TODO: We could check for this in init_sccvn, and replace this
1958 with a gcc_assert. */
1959 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
1960 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
1962 /* See if all non-TOP arguments have the same value. TOP is
1963 equivalent to everything, so we can ignore it. */
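  /* E.g. for x_4 = PHI <x_2(2), x_3(3)>, if x_3 still value numbers to
     VN_TOP (its definition has not been visited yet, say over a back
     edge) and x_2 value numbers to x_2, the only non-TOP value is x_2
     and x_4 simply receives x_2's value number (illustration only).  */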
1964 for (i = 0; i < gimple_phi_num_args (phi); i++)
1966 tree def = PHI_ARG_DEF (phi, i);
1968 if (TREE_CODE (def) == SSA_NAME)
1969 def = SSA_VAL (def);
1972 if (sameval == VN_TOP)
1978 if (!expressions_equal_p (def, sameval))
1986 /* If all value numbered to the same value, the phi node has that
1990 if (is_gimple_min_invariant (sameval))
1992 VN_INFO (PHI_RESULT (phi))->has_constants = true;
1993 VN_INFO (PHI_RESULT (phi))->expr = sameval;
1997 VN_INFO (PHI_RESULT (phi))->has_constants = false;
1998 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2001 if (TREE_CODE (sameval) == SSA_NAME)
2002 return visit_copy (PHI_RESULT (phi), sameval);
2004 return set_ssa_val_to (PHI_RESULT (phi), sameval);
2007 /* Otherwise, see if it is equivalent to a phi node in this block. */
2008 result = vn_phi_lookup (phi);
2011 if (TREE_CODE (result) == SSA_NAME)
2012 changed = visit_copy (PHI_RESULT (phi), result);
2014 changed = set_ssa_val_to (PHI_RESULT (phi), result);
2018 vn_phi_insert (phi, PHI_RESULT (phi));
2019 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2020 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
2021 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2027 /* Return true if EXPR contains constants. */
2030 expr_has_constants (tree expr)
2032 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2035 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
2038 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
2039 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
2040 /* Constants inside reference ops are rarely interesting, but
2041 it can take a lot of looking to find them. */
2043 case tcc_declaration:
2046 return is_gimple_min_invariant (expr);
2051 /* Return true if STMT contains constants. */
2054 stmt_has_constants (gimple stmt)
2056 if (gimple_code (stmt) != GIMPLE_ASSIGN)
2059 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2061 case GIMPLE_UNARY_RHS:
2062 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2064 case GIMPLE_BINARY_RHS:
2065 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2066 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
2067 case GIMPLE_SINGLE_RHS:
2068 /* Constants inside reference ops are rarely interesting, but
2069 it can take a lot of looking to find them. */
2070 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2077 /* Replace SSA_NAMES in expr with their value numbers, and return the
2079 This is performed in place. */
2082 valueize_expr (tree expr)
2084 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2087 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2088 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2089 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2092 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2093 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2094 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2095 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
2096 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
2097 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
2105 /* Simplify the binary expression RHS, and return the result if
2109 simplify_binary_expression (gimple stmt)
2111 tree result = NULL_TREE;
2112 tree op0 = gimple_assign_rhs1 (stmt);
2113 tree op1 = gimple_assign_rhs2 (stmt);
2115 /* This will not catch every single case we could combine, but will
2116 catch those with constants. The goal here is to simultaneously
2117 combine constants between expressions, but avoid infinite
2118 expansion of expressions during simplification. */
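  /* A hedged example of the intent: if op0 is b_2 and b_2 was value
     numbered with the constant-containing expression a_1 + 4, feeding
     that expression to fold lets (a_1 + 4) + 3 combine into a_1 + 7,
     while names without interesting constants are only replaced by
     their value numbers, keeping expressions from growing unboundedly.  */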
2119 if (TREE_CODE (op0) == SSA_NAME)
2121 if (VN_INFO (op0)->has_constants
2122 || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
2123 op0 = valueize_expr (vn_get_expr_for (op0));
2124 else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
2125 op0 = SSA_VAL (op0);
2128 if (TREE_CODE (op1) == SSA_NAME)
2130 if (VN_INFO (op1)->has_constants)
2131 op1 = valueize_expr (vn_get_expr_for (op1));
2132 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
2133 op1 = SSA_VAL (op1);
2136 /* Avoid folding if nothing changed. */
2137 if (op0 == gimple_assign_rhs1 (stmt)
2138 && op1 == gimple_assign_rhs2 (stmt))
2141 fold_defer_overflow_warnings ();
2143 result = fold_binary (gimple_assign_rhs_code (stmt),
2144 TREE_TYPE (gimple_get_lhs (stmt)), op0, op1);
2146 STRIP_USELESS_TYPE_CONVERSION (result);
2148 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
2151 /* Make sure result is not a complex expression consisting
2152 of operators of operators (IE (a + b) + (a + c))
2153 Otherwise, we will end up with unbounded expressions if
2154 fold does anything at all. */
2155 if (result && valid_gimple_rhs_p (result))
2161 /* Simplify the unary expression RHS, and return the result if
2165 simplify_unary_expression (gimple stmt)
2167 tree result = NULL_TREE;
2168 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
2170 /* We handle some tcc_reference codes here that are all
2171 GIMPLE_ASSIGN_SINGLE codes. */
2172 if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
2173 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2174 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2175 op0 = TREE_OPERAND (op0, 0);
2177 if (TREE_CODE (op0) != SSA_NAME)
2181 if (VN_INFO (op0)->has_constants)
2182 op0 = valueize_expr (vn_get_expr_for (op0));
2183 else if (gimple_assign_cast_p (stmt)
2184 || gimple_assign_rhs_code (stmt) == REALPART_EXPR
2185 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2186 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2188 /* We want to do tree-combining on conversion-like expressions.
2189 Make sure we feed only SSA_NAMEs or constants to fold though. */
2190 tree tem = valueize_expr (vn_get_expr_for (op0));
2191 if (UNARY_CLASS_P (tem)
2192 || BINARY_CLASS_P (tem)
2193 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
2194 || TREE_CODE (tem) == SSA_NAME
2195 || is_gimple_min_invariant (tem))
2199 /* Avoid folding if nothing changed, but remember the expression. */
2200 if (op0 == orig_op0)
2203 result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
2204 gimple_expr_type (stmt), op0);
2207 STRIP_USELESS_TYPE_CONVERSION (result);
2208 if (valid_gimple_rhs_p (result))
2215 /* Try to simplify RHS using equivalences and constant folding. */
2218 try_to_simplify (gimple stmt)
2222 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
2223 in this case, there is no point in doing extra work. */
2224 if (gimple_assign_copy_p (stmt)
2225 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2228 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2230 case tcc_declaration:
2231 tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
2237 /* Do not do full-blown reference lookup here, but simplify
2238 reads from constant aggregates. */
2239 tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
2243 /* Fallthrough for some codes that can operate on registers. */
2244 if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
2245 || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
2246 || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
2248 /* We could do a little more with unary ops, if they expand
2249 into binary ops, but it's debatable whether it is worth it. */
2251 return simplify_unary_expression (stmt);
2253 case tcc_comparison:
2255 return simplify_binary_expression (stmt);
2264 /* Visit and value number USE, return true if the value number
2268 visit_use (tree use)
2270 bool changed = false;
2271 gimple stmt = SSA_NAME_DEF_STMT (use);
2273 VN_INFO (use)->use_processed = true;
2275 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
2276 if (dump_file && (dump_flags & TDF_DETAILS)
2277 && !SSA_NAME_IS_DEFAULT_DEF (use))
2279 fprintf (dump_file, "Value numbering ");
2280 print_generic_expr (dump_file, use, 0);
2281 fprintf (dump_file, " stmt = ");
2282 print_gimple_stmt (dump_file, stmt, 0, 0);
2285 /* Handle uninitialized uses. */
2286 if (SSA_NAME_IS_DEFAULT_DEF (use))
2287 changed = set_ssa_val_to (use, use);
2290 if (gimple_code (stmt) == GIMPLE_PHI)
2291 changed = visit_phi (stmt);
2292 else if (!gimple_has_lhs (stmt)
2293 || gimple_has_volatile_ops (stmt)
2294 || stmt_could_throw_p (stmt))
2295 changed = defs_to_varying (stmt);
2296 else if (is_gimple_assign (stmt))
2298 tree lhs = gimple_assign_lhs (stmt);
2301 /* Shortcut for copies. Simplifying copies is pointless,
2302 since we copy the expression and value they represent. */
2303 if (gimple_assign_copy_p (stmt)
2304 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2305 && TREE_CODE (lhs) == SSA_NAME)
2307 changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
2310 simplified = try_to_simplify (stmt);
2313 if (dump_file && (dump_flags & TDF_DETAILS))
2315 fprintf (dump_file, "RHS ");
2316 print_gimple_expr (dump_file, stmt, 0, 0);
2317 fprintf (dump_file, " simplified to ");
2318 print_generic_expr (dump_file, simplified, 0);
2319 if (TREE_CODE (lhs) == SSA_NAME)
2320 fprintf (dump_file, " has constants %d\n",
2321 expr_has_constants (simplified));
2323 fprintf (dump_file, "\n");
2326 /* Setting value numbers to constants will occasionally
2327 screw up phi congruence because constants are not
2328 uniquely associated with a single ssa name that can be looked up. */
if (simplified
2331 && is_gimple_min_invariant (simplified)
2332 && TREE_CODE (lhs) == SSA_NAME)
2334 VN_INFO (lhs)->expr = simplified;
2335 VN_INFO (lhs)->has_constants = true;
2336 changed = set_ssa_val_to (lhs, simplified);
else if (simplified
2340 && TREE_CODE (simplified) == SSA_NAME
2341 && TREE_CODE (lhs) == SSA_NAME)
2343 changed = visit_copy (lhs, simplified);
2346 else if (simplified)
2348 if (TREE_CODE (lhs) == SSA_NAME)
2350 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
2351 /* We have to unshare the expression or else
2352 valueizing may change the IL stream. */
2353 VN_INFO (lhs)->expr = unshare_expr (simplified);
2356 else if (stmt_has_constants (stmt)
2357 && TREE_CODE (lhs) == SSA_NAME)
2358 VN_INFO (lhs)->has_constants = true;
2359 else if (TREE_CODE (lhs) == SSA_NAME)
2361 /* We reset expr and constantness here because we may
2362 have been value numbering optimistically, and
2363 iterating. They may become non-constant in this case,
2364 even if they were optimistically constant. */
2366 VN_INFO (lhs)->has_constants = false;
2367 VN_INFO (lhs)->expr = NULL_TREE;
2370 if (TREE_CODE (lhs) == SSA_NAME
2371 /* We can substitute SSA_NAMEs that are live over
2372 abnormal edges with their constant value. */
2373 && !(gimple_assign_copy_p (stmt)
2374 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
&& !(simplified
2376 && is_gimple_min_invariant (simplified))
2377 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2378 changed = defs_to_varying (stmt);
2379 else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
2381 changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
2383 else if (TREE_CODE (lhs) == SSA_NAME)
2385 if ((gimple_assign_copy_p (stmt)
2386 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
|| (simplified
2388 && is_gimple_min_invariant (simplified)))
2390 VN_INFO (lhs)->has_constants = true;
2392 changed = set_ssa_val_to (lhs, simplified);
2394 changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
2398 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2400 case GIMPLE_UNARY_RHS:
2401 changed = visit_unary_op (lhs, stmt);
2403 case GIMPLE_BINARY_RHS:
2404 changed = visit_binary_op (lhs, stmt);
2406 case GIMPLE_SINGLE_RHS:
2407 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2410 /* VOP-less references can go through the unary case. */
2411 if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
2412 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2413 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR )
2414 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
2416 changed = visit_unary_op (lhs, stmt);
2420 case tcc_declaration:
2421 changed = visit_reference_op_load
2422 (lhs, gimple_assign_rhs1 (stmt), stmt);
2424 case tcc_expression:
2425 if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
2427 changed = visit_unary_op (lhs, stmt);
2432 changed = defs_to_varying (stmt);
2436 changed = defs_to_varying (stmt);
2442 changed = defs_to_varying (stmt);
2444 else if (is_gimple_call (stmt))
2446 tree lhs = gimple_call_lhs (stmt);
2448 /* ??? We could try to simplify calls. */
2450 if (stmt_has_constants (stmt)
2451 && TREE_CODE (lhs) == SSA_NAME)
2452 VN_INFO (lhs)->has_constants = true;
2453 else if (TREE_CODE (lhs) == SSA_NAME)
2455 /* We reset expr and constantness here because we may
2456 have been value numbering optimistically, and
2457 iterating. They may become non-constant in this case,
2458 even if they were optimistically constant. */
2459 VN_INFO (lhs)->has_constants = false;
2460 VN_INFO (lhs)->expr = NULL_TREE;
2463 if (TREE_CODE (lhs) == SSA_NAME
2464 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2465 changed = defs_to_varying (stmt);
2466 /* ??? We should handle stores from calls. */
2467 else if (TREE_CODE (lhs) == SSA_NAME)
2469 if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
2470 changed = visit_reference_op_call (lhs, stmt);
2472 changed = defs_to_varying (stmt);
2475 changed = defs_to_varying (stmt);
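/* Illustrative trace of visit_use (hypothetical, not part of the original
   file): for

       i_2 = 7;
       j_3 = i_2 + 0;

   visiting j_3 calls try_to_simplify, which valueizes i_2 and folds the
   RHS to the constant 7.  Because the simplified result is a gimple min
   invariant and the lhs is an SSA_NAME, VN_INFO (j_3)->expr is set to 7,
   has_constants becomes true, and set_ssa_val_to records that j_3 has the
   value 7, returning whether that value number changed.  */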
2482 /* Compare two operands by reverse postorder index */
2485 compare_ops (const void *pa, const void *pb)
2487 const tree opa = *((const tree *)pa);
2488 const tree opb = *((const tree *)pb);
2489 gimple opstmta = SSA_NAME_DEF_STMT (opa);
2490 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
2494 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
2496 else if (gimple_nop_p (opstmta))
2498 else if (gimple_nop_p (opstmtb))
2501 bba = gimple_bb (opstmta);
2502 bbb = gimple_bb (opstmtb);
2513 if (gimple_code (opstmta) == GIMPLE_PHI
2514 && gimple_code (opstmtb) == GIMPLE_PHI)
2516 else if (gimple_code (opstmta) == GIMPLE_PHI)
2518 else if (gimple_code (opstmtb) == GIMPLE_PHI)
2520 return gimple_uid (opstmta) - gimple_uid (opstmtb);
2522 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
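/* For illustration (hypothetical SSA names and blocks): given operands
   a_1 (a default definition), b_2 defined by a PHI in bb4, c_3 defined by
   an assignment in bb4, and d_4 defined in bb2, with bb2 preceding bb4 in
   RPO, compare_ops orders them a_1, d_4, b_2, c_3: default definitions
   sort first, then defining blocks by RPO number, PHIs before other
   statements within the same block, and statement UIDs break the
   remaining ties.  */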
2525 /* Sort an array containing members of a strongly connected component
2526 SCC so that the members are ordered by RPO number.
2527 This means that when the sort is complete, iterating through the
2528 array will give you the members in RPO order. */
2531 sort_scc (VEC (tree, heap) *scc)
2533 qsort (VEC_address (tree, scc),
2534 VEC_length (tree, scc),
2539 /* Process a strongly connected component in the SSA graph. */
2542 process_scc (VEC (tree, heap) *scc)
2544 /* If the SCC has a single member, just visit it. */
2546 if (VEC_length (tree, scc) == 1)
2548 tree use = VEC_index (tree, scc, 0);
2549 if (!VN_INFO (use)->use_processed)
2556 unsigned int iterations = 0;
2557 bool changed = true;
2559 /* Iterate over the SCC with the optimistic table until it stops changing. */
2561 current_info = optimistic_info;
2566 /* As we are value-numbering optimistically we have to
2567 clear the expression tables and the simplified expressions
2568 in each iteration until we converge. */
2569 htab_empty (optimistic_info->nary);
2570 htab_empty (optimistic_info->phis);
2571 htab_empty (optimistic_info->references);
2572 obstack_free (&optimistic_info->nary_obstack, NULL);
2573 gcc_obstack_init (&optimistic_info->nary_obstack);
2574 empty_alloc_pool (optimistic_info->phis_pool);
2575 empty_alloc_pool (optimistic_info->references_pool);
2576 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2577 VN_INFO (var)->expr = NULL_TREE;
2578 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2579 changed |= visit_use (var);
2582 statistics_histogram_event (cfun, "SCC iterations", iterations);
2584 /* Finally, visit the SCC once using the valid table. */
2585 current_info = valid_info;
2586 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
visit_use (var);
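/* Example of why the optimistic iteration matters (hypothetical loop):

       i_1 = PHI <0 (entry), i_3 (latch)>
       i_3 = i_1 + 0;

   i_1 and i_3 form an SCC.  On the first optimistic iteration the PHI sees
   the arguments 0 and VN_TOP and optimistically gets the value 0, after
   which i_3 folds to 0 as well.  The second iteration changes nothing, so
   the SCC has converged and the final walk with valid_info records
   i_1 == i_3 == 0, which a single non-iterating pass over the cycle could
   not conclude.  */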
2591 DEF_VEC_O(ssa_op_iter);
2592 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
2594 /* Pop the components of the found SCC for NAME off the SCC stack
2595 and process them. Returns true if all went well, false if
2596 we run into resource limits. */
2599 extract_and_process_scc_for_name (tree name)
2601 VEC (tree, heap) *scc = NULL;
2604 /* Found an SCC, pop the components off the SCC stack and process them. */
do
2608 x = VEC_pop (tree, sccstack);
2610 VN_INFO (x)->on_sccstack = false;
2611 VEC_safe_push (tree, heap, scc, x);
2612 } while (x != name);
2614 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
2615 if (VEC_length (tree, scc)
2616 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
2619 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
2620 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
2621 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
2625 if (VEC_length (tree, scc) > 1)
sort_scc (scc);
2628 if (dump_file && (dump_flags & TDF_DETAILS))
2629 print_scc (dump_file, scc);
process_scc (scc);
2633 VEC_free (tree, heap, scc);
return true;
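/* For illustration (hypothetical names): if the DFS pushed x_1, y_2 and
   z_3 onto sccstack in that order and NAME is y_2, the do-while above
   pops z_3 and then y_2, so the extracted SCC is { z_3, y_2 } while x_1
   stays on the stack for a later SCC.  */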
2638 /* Depth first search on NAME to discover and process SCC's in the SSA graph.
2640 Execution of this algorithm relies on the fact that the SCC's are
2641 popped off the stack in topological order.
2642 Returns true if successful, false if we stopped processing SCC's due
2643 to resource constraints. */
static bool
DFS (tree name)
2648 VEC(ssa_op_iter, heap) *itervec = NULL;
2649 VEC(tree, heap) *namevec = NULL;
2650 use_operand_p usep = NULL;
2657 VN_INFO (name)->dfsnum = next_dfs_num++;
2658 VN_INFO (name)->visited = true;
2659 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
2661 VEC_safe_push (tree, heap, sccstack, name);
2662 VN_INFO (name)->on_sccstack = true;
2663 defstmt = SSA_NAME_DEF_STMT (name);
2665 /* Recursively DFS on our operands, looking for SCC's. */
2666 if (!gimple_nop_p (defstmt))
2668 /* Push a new iterator. */
2669 if (gimple_code (defstmt) == GIMPLE_PHI)
2670 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
2672 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
2675 clear_and_done_ssa_iter (&iter);
2679 /* If we are done processing uses of a name, go up the stack
2680 of iterators and process SCCs as we found them. */
2681 if (op_iter_done (&iter))
2683 /* See if we found an SCC. */
2684 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
2685 if (!extract_and_process_scc_for_name (name))
2687 VEC_free (tree, heap, namevec);
2688 VEC_free (ssa_op_iter, heap, itervec);
2692 /* Check if we are done. */
2693 if (VEC_empty (tree, namevec))
2695 VEC_free (tree, heap, namevec);
2696 VEC_free (ssa_op_iter, heap, itervec);
2700 /* Restore the last use walker and continue walking there. */
2702 name = VEC_pop (tree, namevec);
2703 memcpy (&iter, VEC_last (ssa_op_iter, itervec),
2704 sizeof (ssa_op_iter));
2705 VEC_pop (ssa_op_iter, itervec);
2706 goto continue_walking;
2709 use = USE_FROM_PTR (usep);
2711 /* Since we handle phi nodes, we will sometimes get
2712 invariants in the use expression. */
2713 if (TREE_CODE (use) == SSA_NAME)
2715 if (! (VN_INFO (use)->visited))
2717 /* Recurse by pushing the current use walking state on
2718 the stack and starting over. */
2719 VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
2720 VEC_safe_push(tree, heap, namevec, name);
2725 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
2726 VN_INFO (use)->low);
2728 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
2729 && VN_INFO (use)->on_sccstack)
2731 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
2732 VN_INFO (name)->low);
2736 usep = op_iter_next_use (&iter);
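/* For reference, the walk above is an explicitly-managed-stack version of
   the classical recursive Tarjan formulation.  A minimal recursive sketch
   (hypothetical helper, kept in a comment; it glosses over the separate
   PHI and non-PHI operand iterators and the resource checks used above):

     static void
     dfs_recursive (tree name)
     {
       tree use;
       ssa_op_iter iter;
       gimple defstmt = SSA_NAME_DEF_STMT (name);

       VN_INFO (name)->dfsnum = next_dfs_num++;
       VN_INFO (name)->visited = true;
       VN_INFO (name)->low = VN_INFO (name)->dfsnum;
       VEC_safe_push (tree, heap, sccstack, name);
       VN_INFO (name)->on_sccstack = true;

       FOR_EACH_SSA_TREE_OPERAND (use, defstmt, iter, SSA_OP_ALL_USES)
         if (TREE_CODE (use) == SSA_NAME)
           {
             if (!VN_INFO (use)->visited)
               {
                 dfs_recursive (use);
                 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
                                            VN_INFO (use)->low);
               }
             else if (VN_INFO (use)->on_sccstack)
               VN_INFO (name)->low = MIN (VN_INFO (name)->low,
                                          VN_INFO (use)->dfsnum);
           }

       if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
         extract_and_process_scc_for_name (name);
     }

   The iterative form is used instead because SSA use-def chains can be
   deep enough to overflow the host stack.  */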
2740 /* Allocate a value number table. */
2743 allocate_vn_table (vn_tables_t table)
2745 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
2746 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
2747 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
2750 gcc_obstack_init (&table->nary_obstack);
2751 table->phis_pool = create_alloc_pool ("VN phis",
2752 sizeof (struct vn_phi_s),
2754 table->references_pool = create_alloc_pool ("VN references",
2755 sizeof (struct vn_reference_s),
2759 /* Free a value number table. */
2762 free_vn_table (vn_tables_t table)
2764 htab_delete (table->phis);
2765 htab_delete (table->nary);
2766 htab_delete (table->references);
2767 obstack_free (&table->nary_obstack, NULL);
2768 free_alloc_pool (table->phis_pool);
2769 free_alloc_pool (table->references_pool);
static void
init_scc_vn (void)
size_t i;
int j;
2777 int *rpo_numbers_temp;
2779 calculate_dominance_info (CDI_DOMINATORS);
2781 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
2784 constant_value_ids = BITMAP_ALLOC (NULL);
2789 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
2790 /* VEC_alloc doesn't actually grow the vector to the right size; it just
2791 preallocates the space to do so. */
2792 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
2793 gcc_obstack_init (&vn_ssa_aux_obstack);
2795 shared_lookup_phiargs = NULL;
2796 shared_lookup_vops = NULL;
2797 shared_lookup_references = NULL;
2798 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
2799 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
2800 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
2802 /* rpo_numbers_temp is an array of RPO ordering: rpo[i] = bb means that
2803 the i'th block in RPO order is bb. We want to map bb indices to RPO
2804 numbers, so we need to invert this array. */
2805 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
2806 rpo_numbers[rpo_numbers_temp[j]] = j;
2808 XDELETE (rpo_numbers_temp);
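/* Worked example with made-up numbers: if pre_and_rev_post_order_compute
   produced rpo_numbers_temp = { 2, 4, 3 }, i.e. bb2 comes first, bb4
   second and bb3 third in RPO, the loop above sets rpo_numbers[2] = 0,
   rpo_numbers[4] = 1 and rpo_numbers[3] = 2, so a basic block index now
   maps directly to that block's position in the RPO sequence.  */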
2810 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
2812 /* Create the VN_INFO structures, and initialize value numbers to VN_TOP. */
2814 for (i = 0; i < num_ssa_names; i++)
2816 tree name = ssa_name (i);
2819 VN_INFO_GET (name)->valnum = VN_TOP;
2820 VN_INFO (name)->expr = NULL_TREE;
2821 VN_INFO (name)->value_id = 0;
2825 renumber_gimple_stmt_uids ();
2827 /* Create the valid and optimistic value numbering tables. */
2828 valid_info = XCNEW (struct vn_tables_s);
2829 allocate_vn_table (valid_info);
2830 optimistic_info = XCNEW (struct vn_tables_s);
2831 allocate_vn_table (optimistic_info);
void
free_scc_vn (void)
size_t i;
2839 htab_delete (constant_to_value_id);
2840 BITMAP_FREE (constant_value_ids);
2841 VEC_free (tree, heap, shared_lookup_phiargs);
2842 VEC_free (tree, gc, shared_lookup_vops);
2843 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
2844 XDELETEVEC (rpo_numbers);
2846 for (i = 0; i < num_ssa_names; i++)
2848 tree name = ssa_name (i);
2850 && VN_INFO (name)->needs_insertion)
2851 release_ssa_name (name);
2853 obstack_free (&vn_ssa_aux_obstack, NULL);
2854 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
2856 VEC_free (tree, heap, sccstack);
2857 free_vn_table (valid_info);
2858 XDELETE (valid_info);
2859 free_vn_table (optimistic_info);
2860 XDELETE (optimistic_info);
2863 /* Set the value ids in the valid hash tables. */
2866 set_hashtable_value_ids (void)
2873 /* Now set the value ids of the things we had put in the hash tables. */
2876 FOR_EACH_HTAB_ELEMENT (valid_info->nary,
2877 vno, vn_nary_op_t, hi)
2881 if (TREE_CODE (vno->result) == SSA_NAME)
2882 vno->value_id = VN_INFO (vno->result)->value_id;
2883 else if (is_gimple_min_invariant (vno->result))
2884 vno->value_id = get_or_alloc_constant_value_id (vno->result);
2888 FOR_EACH_HTAB_ELEMENT (valid_info->phis,
2893 if (TREE_CODE (vp->result) == SSA_NAME)
2894 vp->value_id = VN_INFO (vp->result)->value_id;
2895 else if (is_gimple_min_invariant (vp->result))
2896 vp->value_id = get_or_alloc_constant_value_id (vp->result);
2900 FOR_EACH_HTAB_ELEMENT (valid_info->references,
2901 vr, vn_reference_t, hi)
2905 if (TREE_CODE (vr->result) == SSA_NAME)
2906 vr->value_id = VN_INFO (vr->result)->value_id;
2907 else if (is_gimple_min_invariant (vr->result))
2908 vr->value_id = get_or_alloc_constant_value_id (vr->result);
2913 /* Do SCCVN. Returns true if it finished, false if we bailed out
2914 due to resource constraints. */
2917 run_scc_vn (bool may_insert_arg)
2921 bool changed = true;
2923 may_insert = may_insert_arg;
2926 current_info = valid_info;
2928 for (param = DECL_ARGUMENTS (current_function_decl);
2930 param = TREE_CHAIN (param))
2932 if (gimple_default_def (cfun, param) != NULL)
2934 tree def = gimple_default_def (cfun, param);
2935 SSA_VAL (def) = def;
2939 for (i = 1; i < num_ssa_names; ++i)
2941 tree name = ssa_name (i);
2943 && VN_INFO (name)->visited == false
2944 && !has_zero_uses (name))
2953 /* Initialize the value ids. */
2955 for (i = 1; i < num_ssa_names; ++i)
2957 tree name = ssa_name (i);
2961 info = VN_INFO (name);
2962 if (info->valnum == name)
2963 info->value_id = get_next_value_id ();
2964 else if (is_gimple_min_invariant (info->valnum))
2965 info->value_id = get_or_alloc_constant_value_id (info->valnum);
2968 /* Propagate value ids until they stop changing. */
2972 for (i = 1; i < num_ssa_names; ++i)
2974 tree name = ssa_name (i);
2978 info = VN_INFO (name);
2979 if (TREE_CODE (info->valnum) == SSA_NAME
2980 && info->valnum != name
2981 && info->value_id != VN_INFO (info->valnum)->value_id)
2984 info->value_id = VN_INFO (info->valnum)->value_id;
2989 set_hashtable_value_ids ();
2991 if (dump_file && (dump_flags & TDF_DETAILS))
2993 fprintf (dump_file, "Value numbers:\n");
2994 for (i = 0; i < num_ssa_names; i++)
2996 tree name = ssa_name (i);
2998 && VN_INFO (name)->visited
2999 && SSA_VAL (name) != name)
3001 print_generic_expr (dump_file, name, 0);
3002 fprintf (dump_file, " = ");
3003 print_generic_expr (dump_file, SSA_VAL (name), 0);
3004 fprintf (dump_file, "\n");
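/* Typical driver usage, as a sketch (the exact call sites live in the
   PRE/FRE pass and may differ in detail): a consumer first calls
   run_scc_vn and gives up when it returns false because an SCC exceeded
   the size limit, then sizes its own tables using get_max_value_id ()
   and queries SSA_VAL / VN_INFO while the information is live, and
   finally calls free_scc_vn ():

     if (!run_scc_vn (false))
       return 0;
     ...
     free_scc_vn ();  */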
3013 /* Return the maximum value id we have ever seen. */
3016 get_max_value_id (void)
3018 return next_value_id;
3021 /* Return the next unique value id. */
3024 get_next_value_id (void)
3026 return next_value_id++;
3030 /* Compare two expressions E1 and E2 and return true if they are equal. */
3033 expressions_equal_p (tree e1, tree e2)
3035 /* The obvious case. */
3039 /* If only one of them is null, they cannot be equal. */
3043 /* Recurse on elements of lists. */
3044 if (TREE_CODE (e1) == TREE_LIST && TREE_CODE (e2) == TREE_LIST)
3048 for (lop1 = e1, lop2 = e2;
3050 lop1 = TREE_CHAIN (lop1), lop2 = TREE_CHAIN (lop2))
3054 if (!expressions_equal_p (TREE_VALUE (lop1), TREE_VALUE (lop2)))
3060 /* Now perform the actual comparison. */
3061 if (TREE_CODE (e1) == TREE_CODE (e2)
3062 && operand_equal_p (e1, e2, OEP_PURE_SAME))
3068 /* Sort the VUSE array so that we can do equality comparisons on two
3069 vuse vecs more quickly. */
3072 sort_vuses (VEC (tree,gc) *vuses)
3074 if (VEC_length (tree, vuses) > 1)
3075 qsort (VEC_address (tree, vuses),
3076 VEC_length (tree, vuses),
3081 /* Sort the heap-allocated VUSE array so that we can do equality
3082 comparisons on two vuse vecs more quickly. */
3085 sort_vuses_heap (VEC (tree,heap) *vuses)
3087 if (VEC_length (tree, vuses) > 1)
3088 qsort (VEC_address (tree, vuses),
3089 VEC_length (tree, vuses),
3095 /* Return true if the nary operation NARY may trap. This is a copy
3096 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
3099 vn_nary_may_trap (vn_nary_op_t nary)
3103 bool honor_nans = false;
3104 bool honor_snans = false;
3105 bool fp_operation = false;
3106 bool honor_trapv = false;
3110 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
3111 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
3112 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
3115 fp_operation = FLOAT_TYPE_P (type);
3118 honor_nans = flag_trapping_math && !flag_finite_math_only;
3119 honor_snans = flag_signaling_nans != 0;
3121 else if (INTEGRAL_TYPE_P (type)
3122 && TYPE_OVERFLOW_TRAPS (type))
honor_trapv = true;
3126 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
honor_trapv,
3128 honor_nans, honor_snans, rhs2,
&handled);
if (handled)
return ret;
3134 for (i = 0; i < nary->length; ++i)
3135 if (tree_could_trap_p (nary->op[i]))
return true;
return false;
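/* Examples (illustrative): a signed TRUNC_DIV_EXPR whose divisor operand
   is an SSA name is considered possibly trapping, because the divisor may
   be zero at run time; a PLUS_EXPR on a wrapping unsigned type is not,
   while the same addition on a signed type compiled with -ftrapv sets
   honor_trapv above and is treated as potentially trapping.  Independently
   of the opcode, any operand for which tree_could_trap_p returns true
   (say an ARRAY_REF that may be out of bounds) makes the whole nary
   operation trapping.  */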