/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   until it stabilizes).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
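
/* For exposition, the SCC walk of alternative 2 is driven roughly as
   in the following sketch (hypothetical helper spelling; the SCC has
   already been popped off SCCSTACK; visit_use is defined further down
   in this file):

       current_info = optimistic_info;
       do
	 {
	   changed = false;
	   empty the optimistic hashtables;
	   for each SSA name N in the SCC
	     changed |= visit_use (N);
	 }
       while (changed);

       current_info = valid_info;
       for each SSA name N in the SCC
	 visit_use (N);

   Once the optimistic iteration stabilizes, one final pass records
   the now-proven results in the valid tables.  */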
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

static htab_t constant_to_value_id;
static bitmap constant_value_ids;

/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;
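
/* As an illustration (this exact pattern recurs in the lookup
   routines below), lookups consult CURRENT_INFO first and, while we
   are optimistically iterating an SCC, fall back to the proven facts
   in VALID_INFO:

       slot = htab_find_slot_with_hash (current_info->nary, &vno1,
					vno1.hashcode, NO_INSERT);
       if (!slot && current_info == optimistic_info)
	 slot = htab_find_slot_with_hash (valid_info->nary, &vno1,
					  vno1.hashcode, NO_INSERT);
*/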
/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;

static bool may_insert;

DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
				SSA_NAME_VERSION (name));
  gcc_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
	       SSA_NAME_VERSION (name), value);
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
		   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
	       SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}
/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is a default-definition or a PHI result
     use it directly.  */
  if (gimple_nop_p (def_stmt)
      || gimple_code (def_stmt) == GIMPLE_PHI)
    return vn->valnum;

  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     cases.  */
  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
    {
    case tcc_reference:
      if (gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
	  || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
	  || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
	expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
			    gimple_expr_type (def_stmt),
			    TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
			  gimple_expr_type (def_stmt),
			  gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
			  gimple_expr_type (def_stmt),
			  gimple_assign_rhs1 (def_stmt),
			  gimple_assign_rhs2 (def_stmt));
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}

/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
				   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  vn_constant_t vc = XNEW (struct vn_constant_s);

  vc->hashcode = vn_hash_constant_with_type (constant);
  vc->constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, vc,
				   vc->hashcode, INSERT);
  if (*slot)
    {
      free (vc);
      return ((vn_constant_t)*slot)->value_id;
    }
  vc->value_id = get_next_value_id ();
  *slot = vc;
  bitmap_set_bit (constant_value_ids, vc->value_id);
  return vc->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
  return vro1->opcode == vro2->opcode
    && types_compatible_p (vro1->type, vro2->type)
    && expressions_equal_p (vro1->op0, vro2->op0)
    && expressions_equal_p (vro1->op1, vro2->op1)
    && expressions_equal_p (vro1->op2, vro2->op2);
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1)
{
  return iterative_hash_expr (vro1->op0, vro1->opcode)
    + iterative_hash_expr (vro1->op1, vro1->opcode)
    + iterative_hash_expr (vro1->op2, vro1->opcode);
}
/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  tree v;
  vn_reference_op_t vro;

  for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
    result += iterative_hash_expr (v, 0);
  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    result += vn_reference_op_compute_hash (vro);

  return result;
}
/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  tree v;
  int i;
  vn_reference_op_t vro;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  if (vr1->vuses == vr2->vuses
      && vr1->operands == vr2->operands)
    return true;

  /* Impossible for them to be equivalent if they have different
     number of vuses.  */
  if (VEC_length (tree, vr1->vuses) != VEC_length (tree, vr2->vuses))
    return false;

  /* We require that address operands be canonicalized in a way that
     two memory references will have the same operands if they are
     equivalent.  */
  if (VEC_length (vn_reference_op_s, vr1->operands)
      != VEC_length (vn_reference_op_s, vr2->operands))
    return false;

  /* The memory state is more often different than the address of the
     store/load, so check it first.  */
  for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
    if (VEC_index (tree, vr2->vuses, i) != v)
      return false;

  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
			     vro))
      return false;

  return true;
}
/* Place the vuses from STMT into *result.  */

static inline void
vuses_to_vec (gimple stmt, VEC (tree, gc) **result)
{
  ssa_op_iter iter;
  tree vuse;

  if (!stmt)
    return;

  VEC_reserve_exact (tree, gc, *result,
		     num_ssa_operands (stmt, SSA_OP_VIRTUAL_USES));

  FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VIRTUAL_USES)
    VEC_quick_push (tree, *result, vuse);
}

/* Copy the VUSE names in STMT into a vector, and return
   the vector.  */

static VEC (tree, gc) *
copy_vuses_from_stmt (gimple stmt)
{
  VEC (tree, gc) *vuses = NULL;

  vuses_to_vec (stmt, &vuses);

  return vuses;
}

/* Place the vdefs from STMT into *result.  */

static inline void
vdefs_to_vec (gimple stmt, VEC (tree, gc) **result)
{
  ssa_op_iter iter;
  tree vdef;

  if (!stmt)
    return;

  *result = VEC_alloc (tree, gc, num_ssa_operands (stmt, SSA_OP_VIRTUAL_DEFS));

  FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, iter, SSA_OP_VIRTUAL_DEFS)
    VEC_quick_push (tree, *result, vdef);
}

/* Copy the names of vdef results in STMT into a vector, and return
   the vector.  */

static VEC (tree, gc) *
copy_vdefs_from_stmt (gimple stmt)
{
  VEC (tree, gc) *vdefs = NULL;

  vdefs_to_vec (stmt, &vdefs);

  return vdefs;
}

/* Place for shared_v{uses/defs}_from_stmt to shove vuses/vdefs.  */
static VEC (tree, gc) *shared_lookup_vops;

/* Copy the virtual uses from STMT into SHARED_LOOKUP_VOPS.
   This function will overwrite the current SHARED_LOOKUP_VOPS
   variable.  */

VEC (tree, gc) *
shared_vuses_from_stmt (gimple stmt)
{
  VEC_truncate (tree, shared_lookup_vops, 0);
  vuses_to_vec (stmt, &shared_lookup_vops);

  return shared_lookup_vops;
}
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
      temp.op1 = TMR_INDEX (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_STEP (ref);
      temp.op1 = TMR_OFFSET (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);

      switch (temp.opcode)
	{
	case ALIGN_INDIRECT_REF:
	case INDIRECT_REF:
	  /* The only operand is the address, which gets its own
	     vn_reference_op_s structure.  */
	  break;
	case MISALIGNED_INDIRECT_REF:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  break;
	case BIT_FIELD_REF:
	  /* Record bits and position.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     a matching type is not necessary and a mismatching type
	     is always a spurious difference.  */
	  temp.type = NULL_TREE;
	  /* If this is a reference to a union member, record the union
	     member size as operand.  Do so only if we are doing
	     expression insertion (during FRE), as PRE currently gets
	     confused with this.  */
	  if (may_insert
	      && TREE_OPERAND (ref, 2) == NULL_TREE
	      && TREE_CODE (DECL_CONTEXT (TREE_OPERAND (ref, 1))) == UNION_TYPE
	      && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (ref, 1)))
	      && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1))))
	    temp.op0 = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)));
	  else
	    {
	      /* Record field as operand.  */
	      temp.op0 = TREE_OPERAND (ref, 1);
	      temp.op1 = TREE_OPERAND (ref, 2);
	    }
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* Record index as operand.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  temp.op2 = TREE_OPERAND (ref, 3);
	  break;
	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case VAR_DECL:
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  temp.op0 = ref;
	  break;
	case ADDR_EXPR:
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthrough.  */
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (IE they require an
	     operand), so we don't have to put anything
	     for op* as it will be handled by the iteration.  */
	case IMAGPART_EXPR:
	case REALPART_EXPR:
	case VIEW_CONVERT_EXPR:
	  break;
	default:
	  gcc_unreachable ();
	}
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}
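
/* As an illustrative example (the exact fields depend on the trees
   involved), the reference a.f[i_1] decomposes into the operand
   vector, outermost operation first:

       { opcode = ARRAY_REF,     op0 = i_1, op1 = NULL, op2 = NULL }
       { opcode = COMPONENT_REF, op0 = FIELD_DECL f }
       { opcode = VAR_DECL,      op0 = a }

   valueize_refs below then replaces i_1 by its value number, which is
   what makes two equivalent references hash and compare equal.  */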
/* Re-create a reference tree from the reference ops OPS.
   Returns NULL_TREE if the ops were not handled.
   This routine needs to be kept in sync with copy_reference_ops_from_ref.  */

static tree
get_ref_from_reference_ops (VEC(vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree ref, *op0_p = &ref;

  for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
    {
      switch (op->opcode)
	{
	case CALL_EXPR:
	  return NULL_TREE;

	case ALIGN_INDIRECT_REF:
	case INDIRECT_REF:
	  *op0_p = build1 (op->opcode, op->type, NULL_TREE);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case MISALIGNED_INDIRECT_REF:
	  *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type,
			   NULL_TREE, op->op0);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case BIT_FIELD_REF:
	  *op0_p = build3 (BIT_FIELD_REF, op->type, NULL_TREE,
			   op->op0, op->op1);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case COMPONENT_REF:
	  *op0_p = build3 (COMPONENT_REF, TREE_TYPE (op->op0), NULL_TREE,
			   op->op0, op->op1);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  *op0_p = build4 (op->opcode, op->type, NULL_TREE,
			   op->op0, op->op1, op->op2);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case VAR_DECL:
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  *op0_p = op->op0;
	  break;

	case ADDR_EXPR:
	  if (op->op0 != NULL_TREE)
	    {
	      gcc_assert (is_gimple_min_invariant (op->op0));
	      *op0_p = op->op0;
	      break;
	    }
	  /* Fallthrough.  */
	case IMAGPART_EXPR:
	case REALPART_EXPR:
	case VIEW_CONVERT_EXPR:
	  *op0_p = build1 (op->opcode, op->type, NULL_TREE);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	default:
	  return NULL_TREE;
	}
    }

  return ref;
}
/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
			      VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}

static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
shared_reference_ops_from_ref (tree ref)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  return shared_lookup_references;
}
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  vn_reference_op_t vro;
  int i;

  for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
    {
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  vro->op0 = SSA_VAL (vro->op0);
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      /* TODO: Do we want to valueize op2 and op1 of
	 ARRAY_REF/COMPONENT_REF for Ada?  */
    }

  return orig;
}
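
/* For instance, if i_1 currently value-numbers to the constant 4, the
   index operand of { opcode = ARRAY_REF, op0 = i_1 } becomes 4, and a
   standalone { opcode = SSA_NAME, op0 = i_1 } record in addition has
   its opcode updated to INTEGER_CST; a[i_1] and a[4] then carry
   identical operand vectors (illustrative example).  */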
/* Transform any SSA_NAME's in ORIG, a vector of vuse trees, into
   their value numbers.  This is done in-place, and the vector passed
   in is returned.  */

static VEC (tree, gc) *
valueize_vuses (VEC (tree, gc) *orig)
{
  bool made_replacement = false;
  tree vuse;
  int i;

  for (i = 0; VEC_iterate (tree, orig, i, vuse); i++)
    {
      if (vuse != SSA_VAL (vuse))
	{
	  made_replacement = true;
	  VEC_replace (tree, orig, i, SSA_VAL (vuse));
	}
    }

  if (made_replacement && VEC_length (tree, orig) > 1)
    sort_vuses (orig);

  return orig;
}
/* Return the single reference statement defining all virtual uses
   in VUSES or NULL_TREE, if there are multiple defining statements.
   Take into account only definitions that alias REF if following
   virtual use-def chains.  */

static gimple
get_def_ref_stmt_vuses (tree ref, VEC (tree, gc) *vuses)
{
  gimple def_stmt;
  tree vuse;
  unsigned int i;

  gcc_assert (VEC_length (tree, vuses) >= 1);

  def_stmt = SSA_NAME_DEF_STMT (VEC_index (tree, vuses, 0));
  if (gimple_code (def_stmt) == GIMPLE_PHI)
    {
      /* We can only handle lookups over PHI nodes for a single
	 virtual operand.  */
      if (VEC_length (tree, vuses) == 1)
	{
	  def_stmt = get_single_def_stmt_from_phi (ref, def_stmt);
	  goto cont;
	}
      else
	return NULL;
    }

  /* Verify each VUSE reaches the same defining stmt.  */
  for (i = 1; VEC_iterate (tree, vuses, i, vuse); ++i)
    {
      gimple tmp = SSA_NAME_DEF_STMT (vuse);
      if (tmp != def_stmt)
	return NULL;
    }

  /* Now see if the definition aliases ref, and loop until it does.  */
cont:
  while (def_stmt
	 && is_gimple_assign (def_stmt)
	 && !refs_may_alias_p (ref, gimple_get_lhs (def_stmt)))
    def_stmt = get_single_def_stmt_with_phi (ref, def_stmt);

  return def_stmt;
}
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;

  slot = htab_find_slot_with_hash (current_info->references, vr,
				   vr->hashcode, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
				     vr->hashcode, NO_INSERT);
  if (slot)
    {
      if (vnresult)
	*vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}
/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (VEC (tree, gc) *vuses,
			    VEC (vn_reference_op_s, heap) *operands,
			    vn_reference_t *vnresult, bool maywalk)
{
  struct vn_reference_s vr1;
  tree result;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuses = valueize_vuses (vuses);
  vr1.operands = valueize_refs (operands);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  result = vn_reference_lookup_1 (&vr1, vnresult);

  /* If there is a single defining statement for all virtual uses, we can
     use that, following virtual use-def chains.  */
  if (!result
      && maywalk
      && VEC_length (tree, vr1.vuses) >= 1)
    {
      gimple def_stmt;
      tree ref = get_ref_from_reference_ops (operands);
      if (ref
	  && (def_stmt = get_def_ref_stmt_vuses (ref, vr1.vuses))
	  && is_gimple_assign (def_stmt))
	{
	  /* We are now at an aliasing definition for the vuses we want to
	     look up.  Re-do the lookup with the vdefs for this stmt.  */
	  vdefs_to_vec (def_stmt, &vuses);
	  vr1.vuses = valueize_vuses (vuses);
	  vr1.hashcode = vn_reference_compute_hash (&vr1);
	  result = vn_reference_lookup_1 (&vr1, vnresult);
	}
    }

  return result;
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

tree
vn_reference_lookup (tree op, VEC (tree, gc) *vuses, bool maywalk,
		     vn_reference_t *vnresult)
{
  struct vn_reference_s vr1;
  tree result;
  gimple def_stmt;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuses = valueize_vuses (vuses);
  vr1.operands = valueize_refs (shared_reference_ops_from_ref (op));
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  result = vn_reference_lookup_1 (&vr1, vnresult);

  /* If there is a single defining statement for all virtual uses, we can
     use that, following virtual use-def chains.  */
  if (!result
      && maywalk
      && VEC_length (tree, vr1.vuses) >= 1
      && (def_stmt = get_def_ref_stmt_vuses (op, vr1.vuses))
      && is_gimple_assign (def_stmt))
    {
      /* We are now at an aliasing definition for the vuses we want to
	 look up.  Re-do the lookup with the vdefs for this stmt.  */
      vdefs_to_vec (def_stmt, &vuses);
      vr1.vuses = valueize_vuses (vuses);
      vr1.hashcode = vn_reference_compute_hash (&vr1);
      result = vn_reference_lookup_1 (&vr1, vnresult);
    }

  return result;
}
/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

vn_reference_t
vn_reference_insert (tree op, tree result, VEC (tree, gc) *vuses)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuses = valueize_vuses (vuses);
  vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
				   INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (VEC (tree, gc) *vuses,
			    VEC (vn_reference_op_s, heap) *operands,
			    tree result, unsigned int value_id)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  vr1->value_id = value_id;
  vr1->vuses = valueize_vuses (vuses);
  vr1->operands = valueize_refs (operands);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
				   INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Compute and return the hash value for nary operation VNO1.  */

inline hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  hashval_t hash = 0;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      tree temp = vno1->op[0];
      vno1->op[0] = vno1->op[1];
      vno1->op[1] = temp;
    }

  for (i = 0; i < vno1->length; ++i)
    hash += iterative_hash_expr (vno1->op[i], vno1->opcode);

  return hash;
}
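
/* Note the effect of the canonicalization above: operands are
   valueized and commutative operands are put into a canonical order
   before hashing, so e.g. a_1 + b_2 and b_2 + a_1 obtain the same
   hashcode and compare equal via vn_nary_op_eq, letting the second
   occurrence reuse the first one's value number (illustrative SSA
   names).  */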
/* Return the computed hashcode for nary operation P1.  */

static hashval_t
vn_nary_op_hash (const void *p1)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

int
vn_nary_op_eq (const void *p1, const void *p2)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
  unsigned i;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  return true;
}
/* Lookup a n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
			  tree type, tree op0, tree op1, tree op2,
			  tree op3, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = code;
  vno1.length = length;
  vno1.type = type;
  vno1.op[0] = op0;
  vno1.op[1] = op1;
  vno1.op[2] = op2;
  vno1.op[3] = op3;
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
				   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
				     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = TREE_CODE (op);
  vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno1.type = TREE_TYPE (op);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = TREE_OPERAND (op, i);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
				   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
				     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}
/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = gimple_assign_rhs_code (stmt);
  vno1.length = gimple_num_ops (stmt) - 1;
  vno1.type = TREE_TYPE (gimple_assign_lhs (stmt));
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = gimple_op (stmt, i + 1);
  if (vno1.opcode == REALPART_EXPR
      || vno1.opcode == IMAGPART_EXPR
      || vno1.opcode == VIEW_CONVERT_EXPR)
    vno1.op[0] = TREE_OPERAND (vno1.op[0], 0);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
				   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
				     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}
/* Insert a n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
			  tree type, tree op0,
			  tree op1, tree op2, tree op3,
			  tree result,
			  unsigned int value_id)
{
  void **slot;
  vn_nary_op_t vno1;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
				       (sizeof (struct vn_nary_op_s)
					- sizeof (tree) * (4 - length)));
  vno1->value_id = value_id;
  vno1->opcode = code;
  vno1->length = length;
  vno1->type = type;
  if (length >= 1)
    vno1->op[0] = op0;
  if (length >= 2)
    vno1->op[1] = op1;
  if (length >= 3)
    vno1->op[2] = op2;
  if (length == 4)
    vno1->op[3] = op3;
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
				   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}
/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
				       (sizeof (struct vn_nary_op_s)
					- sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = TREE_CODE (op);
  vno1->length = length;
  vno1->type = TREE_TYPE (op);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = TREE_OPERAND (op, i);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
				   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}
/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

vn_nary_op_t
vn_nary_op_insert_stmt (gimple stmt, tree result)
{
  unsigned length = gimple_num_ops (stmt) - 1;
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
				       (sizeof (struct vn_nary_op_s)
					- sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = gimple_assign_rhs_code (stmt);
  vno1->length = length;
  vno1->type = TREE_TYPE (gimple_assign_lhs (stmt));
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = gimple_op (stmt, i + 1);
  if (vno1->opcode == REALPART_EXPR
      || vno1->opcode == IMAGPART_EXPR
      || vno1->opcode == VIEW_CONVERT_EXPR)
    vno1->op[0] = TREE_OPERAND (vno1->op[0], 0);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
				   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}
/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  hashval_t result = 0;
  int i;
  tree phi1op;
  tree type;

  result = vp1->block->index;

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
  result += (INTEGRAL_TYPE_P (type)
	     + (INTEGRAL_TYPE_P (type)
		? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));

  for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
    {
      if (phi1op == VN_TOP)
	continue;
      result += iterative_hash_expr (phi1op, result);
    }

  return result;
}
/* Return the computed hashcode for phi operation P1.  */

static hashval_t
vn_phi_hash (const void *p1)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const void *p1, const void *p2)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  const_vn_phi_t const vp2 = (const_vn_phi_t) p2;

  if (vp1->block == vp2->block)
    {
      int i;
      tree phi1op;

      /* If the PHI nodes do not have compatible types
	 they are not the same.  */
      if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
			       TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
	return false;

      /* Any phi in the same block will have its arguments in the
	 same edge order, because of how we store phi nodes.  */
      for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
	{
	  tree phi2op = VEC_index (tree, vp2->phiargs, i);
	  if (phi1op == VN_TOP || phi2op == VN_TOP)
	    continue;
	  if (!expressions_equal_p (phi1op, phi2op))
	    return false;
	}
      return true;
    }
  return false;
}
static VEC(tree, heap) *shared_lookup_phiargs;

/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

static tree
vn_phi_lookup (gimple phi)
{
  void **slot;
  struct vn_phi_s vp1;
  unsigned i;

  VEC_truncate (tree, shared_lookup_phiargs, 0);

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
    }
  vp1.phiargs = shared_lookup_phiargs;
  vp1.block = gimple_bb (phi);
  vp1.hashcode = vn_phi_compute_hash (&vp1);
  slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
				   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
				     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return ((vn_phi_t)*slot)->result;
}
/* Insert PHI into the current hash table with a value number of
   RESULT.  */

static vn_phi_t
vn_phi_insert (gimple phi, tree result)
{
  void **slot;
  vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
  unsigned i;
  VEC (tree, heap) *args = NULL;

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, args, def);
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->phiargs = args;
  vp1->block = gimple_bb (phi);
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
				   INSERT);

  /* Because we iterate over phi operations more than once, it's
     possible the slot might already exist here, hence no assert.  */
  *slot = vp1;
  return vp1;
}
/* Print set of components in strongly connected component SCC to OUT.  */

static void
print_scc (FILE *out, VEC (tree, heap) *scc)
{
  tree var;
  unsigned int i;

  fprintf (out, "SCC consists of: ");
  for (i = 0; VEC_iterate (tree, scc, i, var); i++)
    {
      print_generic_expr (out, var, 0);
      fprintf (out, " ");
    }
  fprintf (out, "\n");
}

/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  tree currval;

  if (from != to
      && TREE_CODE (to) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
    to = from;

  /* The only thing we allow as value numbers are VN_TOP, ssa_names
     and invariants.  So assert that here.  */
  gcc_assert (to != NULL_TREE
	      && (to == VN_TOP
		  || TREE_CODE (to) == SSA_NAME
		  || is_gimple_min_invariant (to)));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from, 0);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to, 0);
    }

  currval = SSA_VAL (from);

  if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
    {
      SSA_VAL (from) = to;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " (changed)\n");
      return true;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}
/* Value number all definitions in STMT to themselves.
   Return true if a value number changed.  */

static bool
defs_to_varying (gimple stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);

      VN_INFO (def)->use_processed = true;
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}

static bool expr_has_constants (tree expr);
static tree valueize_expr (tree expr);
/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  /* Follow chains of copies to their destination.  */
  while (TREE_CODE (rhs) == SSA_NAME
	 && SSA_VAL (rhs) != rhs)
    rhs = SSA_VAL (rhs);

  /* The copy may have a more interesting constant filled expression
     (we don't, since we know our RHS is just an SSA name).  */
  if (TREE_CODE (rhs) == SSA_NAME)
    {
      VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
      VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
    }

  return set_ssa_val_to (lhs, rhs);
}
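
/* For example, given the copy chain x_2 = x_1; x_3 = x_2; visiting
   x_3 follows SSA_VAL until it stops changing, so x_1, x_2 and x_3
   all end up sharing the value number x_1 (illustrative names).  */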
/* Visit a unary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_unary_op (tree lhs, gimple stmt)
{
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
    }

  return changed;
}

/* Visit a binary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_binary_op (tree lhs, gimple stmt)
{
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
    }

  return changed;
}
/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_reference_op_call (tree lhs, gimple stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  tree result;

  vr1.vuses = valueize_vuses (shared_vuses_from_stmt (stmt));
  vr1.operands = valueize_refs (shared_reference_ops_from_call (stmt));
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  result = vn_reference_lookup_1 (&vr1, NULL);
  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
	  && VN_INFO (result)->has_constants)
	VN_INFO (lhs)->has_constants = true;
    }
  else
    {
      void **slot;
      vn_reference_t vr2;

      changed = set_ssa_val_to (lhs, lhs);
      vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
      vr2->vuses = valueize_vuses (copy_vuses_from_stmt (stmt));
      vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      slot = htab_find_slot_with_hash (current_info->references,
				       vr2, vr2->hashcode, INSERT);
      if (*slot)
	free_reference (*slot);
      *slot = vr2;
    }

  return changed;
}
/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree result = vn_reference_lookup (op, shared_vuses_from_stmt (stmt), true,
				     NULL);

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* We will be setting the value number of lhs to the value number
	 of VIEW_CONVERT_EXPR <TREE_TYPE (op)> (result).
	 So first simplify and lookup this expression to see if it
	 is already available.  */
      tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
      if ((CONVERT_EXPR_P (val)
	   || TREE_CODE (val) == VIEW_CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
	{
	  tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
	  if ((CONVERT_EXPR_P (tem)
	       || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
	      && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
						    TREE_TYPE (val), tem)))
	    val = tem;
	}
      result = val;
      if (!is_gimple_min_invariant (val)
	  && TREE_CODE (val) != SSA_NAME)
	result = vn_nary_op_lookup (val, NULL);
      /* If the expression is not yet available, value-number lhs to
	 a new SSA_NAME we create.  */
      if (!result && may_insert)
	{
	  result = make_ssa_name (SSA_NAME_VAR (lhs), NULL);
	  /* Initialize value-number information properly.  */
	  VN_INFO_GET (result)->valnum = result;
	  VN_INFO (result)->value_id = get_next_value_id ();
	  VN_INFO (result)->expr = val;
	  VN_INFO (result)->has_constants = expr_has_constants (val);
	  VN_INFO (result)->needs_insertion = true;
	  /* As all "inserted" statements are singleton SCCs, insert
	     to the valid table.  This is strictly needed to
	     avoid re-generating new value SSA_NAMEs for the same
	     expression during SCC iteration over and over (the
	     optimistic table gets cleared after each iteration).
	     We do not need to insert into the optimistic table, as
	     lookups there will fall back to the valid table.  */
	  if (current_info == optimistic_info)
	    {
	      current_info = valid_info;
	      vn_nary_op_insert (val, result);
	      current_info = optimistic_info;
	    }
	  else
	    vn_nary_op_insert (val, result);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Inserting name ");
	      print_generic_expr (dump_file, result, 0);
	      fprintf (dump_file, " for expression ");
	      print_generic_expr (dump_file, val, 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
	  && VN_INFO (result)->has_constants)
	{
	  VN_INFO (lhs)->expr = VN_INFO (result)->expr;
	  VN_INFO (lhs)->has_constants = true;
	}
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, copy_vuses_from_stmt (stmt));
    }

  return changed;
}
/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree result;
  bool resultsame = false;

  /* First we want to lookup using the *vuses* from the store and see
     if there the last store to this location with the same address
     had the same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store is the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */
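
  /* Concretely (illustrative SSA names, not output of a real dump):

	 # m_4 = VDEF <m_1>
	 *p_2 = x_3;	inserted into the table keyed on its vdef m_4

	 # m_5 = VDEF <m_4>
	 *p_2 = x_3;	looks up { *p_2, x_3 } with vuse m_4, finds the
			entry above with the same value, and so value
			numbers its vdef m_5 to m_4

     so the second store provably creates no new memory state.  */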
  result = vn_reference_lookup (lhs, shared_vuses_from_stmt (stmt), false,
				NULL);
  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
	result = SSA_VAL (result);
      if (TREE_CODE (op) == SSA_NAME)
	op = SSA_VAL (op);
      resultsame = expressions_equal_p (result, op);
    }

  if (!result || !resultsame)
    {
      VEC(tree, gc) *vdefs = copy_vdefs_from_stmt (stmt);
      int i;
      tree vdef;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "No store match\n");
	  fprintf (dump_file, "Value numbering store ");
	  print_generic_expr (dump_file, lhs, 0);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, op, 0);
	  fprintf (dump_file, "\n");
	}
      /* Have to set value numbers before insert, since insert is
	 going to valueize the references in-place.  */
      for (i = 0; VEC_iterate (tree, vdefs, i, vdef); i++)
	{
	  VN_INFO (vdef)->use_processed = true;
	  changed |= set_ssa_val_to (vdef, vdef);
	}

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
	  || is_gimple_reg (op))
	vn_reference_insert (lhs, op, vdefs);
    }
  else
    {
      /* We had a match, so value number the vdefs to have the value
	 number of the vuses they came from.  */
      ssa_op_iter op_iter;
      def_operand_p var;
      vuse_vec_p vv;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Store matched earlier value, "
		 "value numbering store vdefs to matching vuses.\n");

      FOR_EACH_SSA_VDEF_OPERAND (var, vv, stmt, op_iter)
	{
	  tree def = DEF_FROM_PTR (var);
	  tree use;

	  /* Uh, if the vuse is a multiuse, we can't really do much
	     here, sadly, since we don't know which value number of
	     which vuse to use.  */
	  if (VUSE_VECT_NUM_ELEM (*vv) != 1)
	    use = def;
	  else
	    use = VUSE_ELEMENT_VAR (*vv, 0);

	  VN_INFO (def)->use_processed = true;
	  changed |= set_ssa_val_to (def, SSA_VAL (use));
	}
    }

  return changed;
}
/* Visit and value number PHI, return true if the value number
   changed.  */

static bool
visit_phi (gimple phi)
{
  bool changed = false;
  tree result;
  tree sameval = VN_TOP;
  bool allsame = true;
  unsigned i;

  /* TODO: We could check for this in init_sccvn, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
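
  /* E.g. for result_4 = PHI <x_2, VN_TOP>, where the back edge
     argument has not been visited yet, the only non-TOP value is
     x_2's, so result_4 optimistically receives that value; a later
     iteration revisits the PHI once the back edge argument has a
     real value number (illustrative names).  */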
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);

      if (TREE_CODE (def) == SSA_NAME)
	def = SSA_VAL (def);
      if (def == VN_TOP)
	continue;
      if (sameval == VN_TOP)
	{
	  sameval = def;
	}
      else
	{
	  if (!expressions_equal_p (def, sameval))
	    {
	      allsame = false;
	      break;
	    }
	}
    }

  /* If all value numbered to the same value, the phi node has that
     value.  */
  if (allsame)
    {
      if (is_gimple_min_invariant (sameval))
	{
	  VN_INFO (PHI_RESULT (phi))->has_constants = true;
	  VN_INFO (PHI_RESULT (phi))->expr = sameval;
	}
      else
	{
	  VN_INFO (PHI_RESULT (phi))->has_constants = false;
	  VN_INFO (PHI_RESULT (phi))->expr = sameval;
	}

      if (TREE_CODE (sameval) == SSA_NAME)
	return visit_copy (PHI_RESULT (phi), sameval);

      return set_ssa_val_to (PHI_RESULT (phi), sameval);
    }

  /* Otherwise, see if it is equivalent to a phi node in this block.  */
  result = vn_phi_lookup (phi);
  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
	changed = visit_copy (PHI_RESULT (phi), result);
      else
	changed = set_ssa_val_to (PHI_RESULT (phi), result);
    }
  else
    {
      vn_phi_insert (phi, PHI_RESULT (phi));
      VN_INFO (PHI_RESULT (phi))->has_constants = false;
      VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
      changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
    }

  return changed;
}
/* Return true if EXPR contains constants.  */

static bool
expr_has_constants (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0));

    case tcc_binary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
	|| is_gimple_min_invariant (TREE_OPERAND (expr, 1));
      /* Constants inside reference ops are rarely interesting, but
	 it can take a lot of looking to find them.  */
    case tcc_reference:
    case tcc_declaration:
      return false;
    default:
      return is_gimple_min_invariant (expr);
    }
  return false;
}

/* Return true if STMT contains constants.  */

static bool
stmt_has_constants (gimple stmt)
{
  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
    {
    case GIMPLE_UNARY_RHS:
      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));

    case GIMPLE_BINARY_RHS:
      return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
	      || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
    case GIMPLE_SINGLE_RHS:
      /* Constants inside reference ops are rarely interesting, but
	 it can take a lot of looking to find them.  */
      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
    default:
      return false;
    }
  return false;
}
/* Replace SSA_NAMES in expr with their value numbers, and return the
   result.
   This is performed in place.  */

static tree
valueize_expr (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
	  && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
	TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
      break;
    case tcc_binary:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
	  && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
	TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
      if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
	  && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
	TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
      break;
    default:
      break;
    }
  return expr;
}
/* Simplify the binary expression RHS, and return the result if
   simplified.  */

static tree
simplify_binary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree op0 = gimple_assign_rhs1 (stmt);
  tree op1 = gimple_assign_rhs2 (stmt);

  /* This will not catch every single case we could combine, but will
     catch those with constants.  The goal here is to simultaneously
     combine constants between expressions, but avoid infinite
     expansion of expressions during simplification.  */
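
  /* For example, with b_2 = a_1 + 1; c_3 = b_2 + 2; feeding b_2's
     constant-carrying expression a_1 + 1 to fold lets it combine the
     two additions into a_1 + 3, while SSA names whose expressions
     carry no constants are not expanded at all (illustrative
     names).  */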
  if (TREE_CODE (op0) == SSA_NAME)
    {
      if (VN_INFO (op0)->has_constants
	  || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
	op0 = valueize_expr (vn_get_expr_for (op0));
      else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
	op0 = SSA_VAL (op0);
    }

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (VN_INFO (op1)->has_constants)
	op1 = valueize_expr (vn_get_expr_for (op1));
      else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
	op1 = SSA_VAL (op1);
    }

  /* Avoid folding if nothing changed.  */
  if (op0 == gimple_assign_rhs1 (stmt)
      && op1 == gimple_assign_rhs2 (stmt))
    return NULL_TREE;

  fold_defer_overflow_warnings ();

  result = fold_binary (gimple_assign_rhs_code (stmt),
			TREE_TYPE (gimple_get_lhs (stmt)), op0, op1);
  if (result)
    STRIP_USELESS_TYPE_CONVERSION (result);

  fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
				  stmt, 0);

  /* Make sure result is not a complex expression consisting
     of operators of operators (IE (a + b) + (a + c))
     Otherwise, we will end up with unbounded expressions if
     fold does anything at all.  */
  if (result && valid_gimple_rhs_p (result))
    return result;

  return NULL_TREE;
}
/* Simplify the unary expression RHS, and return the result if
   simplified.  */

static tree
simplify_unary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree orig_op0, op0 = gimple_assign_rhs1 (stmt);

  /* We handle some tcc_reference codes here that are all
     GIMPLE_ASSIGN_SINGLE codes.  */
  if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
      || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
      || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
    op0 = TREE_OPERAND (op0, 0);

  if (TREE_CODE (op0) != SSA_NAME)
    return NULL_TREE;

  orig_op0 = op0;
  if (VN_INFO (op0)->has_constants)
    op0 = valueize_expr (vn_get_expr_for (op0));
  else if (gimple_assign_cast_p (stmt)
	   || gimple_assign_rhs_code (stmt) == REALPART_EXPR
	   || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
	   || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
    {
      /* We want to do tree-combining on conversion-like expressions.
	 Make sure we feed only SSA_NAMEs or constants to fold though.  */
      tree tem = valueize_expr (vn_get_expr_for (op0));
      if (UNARY_CLASS_P (tem)
	  || BINARY_CLASS_P (tem)
	  || TREE_CODE (tem) == VIEW_CONVERT_EXPR
	  || TREE_CODE (tem) == SSA_NAME
	  || is_gimple_min_invariant (tem))
	op0 = tem;
    }

  /* Avoid folding if nothing changed, but remember the expression.  */
  if (op0 == orig_op0)
    return NULL_TREE;

  result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
				       gimple_expr_type (stmt), op0);
  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (valid_gimple_rhs_p (result))
	return result;
    }

  return NULL_TREE;
}
/* Try to simplify RHS using equivalences and constant folding.  */

static tree
try_to_simplify (gimple stmt)
{
  tree tem;

  /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
     in this case, there is no point in doing extra work.  */
  if (gimple_assign_copy_p (stmt)
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
    return NULL_TREE;

  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
    {
    case tcc_declaration:
      tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
      if (tem)
	return tem;
      break;

    case tcc_reference:
      /* Do not do full-blown reference lookup here, but simplify
	 reads from constant aggregates.  */
      tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
      if (tem)
	return tem;

      /* Fallthrough for some codes that can operate on registers.  */
      if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
	    || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
	    || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
	break;

      /* We could do a little more with unary ops, if they expand
	 into binary ops, but it's debatable whether it is worth it.  */
    case tcc_unary:
      return simplify_unary_expression (stmt);

    case tcc_comparison:
    case tcc_binary:
      return simplify_binary_expression (stmt);

    default:
      break;
    }

  return NULL_TREE;
}
/* Visit and value number USE, return true if the value number
   changed.  */

static bool
visit_use (tree use)
{
  bool changed = false;
  gimple stmt = SSA_NAME_DEF_STMT (use);

  VN_INFO (use)->use_processed = true;

  gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
  if (dump_file && (dump_flags & TDF_DETAILS)
      && !SSA_NAME_IS_DEFAULT_DEF (use))
    {
      fprintf (dump_file, "Value numbering ");
      print_generic_expr (dump_file, use, 0);
      fprintf (dump_file, " stmt = ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* Handle uninitialized uses.  */
  if (SSA_NAME_IS_DEFAULT_DEF (use))
    changed = set_ssa_val_to (use, use);
  else
    {
      if (gimple_code (stmt) == GIMPLE_PHI)
	changed = visit_phi (stmt);
      else if (!gimple_has_lhs (stmt)
	       || gimple_has_volatile_ops (stmt)
	       || stmt_could_throw_p (stmt))
	changed = defs_to_varying (stmt);
      else if (is_gimple_assign (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree simplified;

	  /* Shortcut for copies.  Simplifying copies is pointless,
	     since we copy the expression and value they represent.  */
	  if (gimple_assign_copy_p (stmt)
	      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
	      && TREE_CODE (lhs) == SSA_NAME)
	    {
	      changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
	      goto done;
	    }
	  simplified = try_to_simplify (stmt);
	  if (simplified)
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "RHS ");
		  print_gimple_expr (dump_file, stmt, 0, 0);
		  fprintf (dump_file, " simplified to ");
		  print_generic_expr (dump_file, simplified, 0);
		  if (TREE_CODE (lhs) == SSA_NAME)
		    fprintf (dump_file, " has constants %d\n",
			     expr_has_constants (simplified));
		  else
		    fprintf (dump_file, "\n");
		}
	    }
2309 /* Setting value numbers to constants will occasionally
2310 screw up phi congruence because constants are not
2311 uniquely associated with a single ssa name that can be
2314 && is_gimple_min_invariant (simplified)
2315 && TREE_CODE (lhs) == SSA_NAME)
2317 VN_INFO (lhs)->expr = simplified;
2318 VN_INFO (lhs)->has_constants = true;
2319 changed = set_ssa_val_to (lhs, simplified);
2323 && TREE_CODE (simplified) == SSA_NAME
2324 && TREE_CODE (lhs) == SSA_NAME)
2326 changed = visit_copy (lhs, simplified);
2329 else if (simplified)
2331 if (TREE_CODE (lhs) == SSA_NAME)
2333 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
2334 /* We have to unshare the expression or else
2335 valuizing may change the IL stream. */
2336 VN_INFO (lhs)->expr = unshare_expr (simplified);
2339 else if (stmt_has_constants (stmt)
2340 && TREE_CODE (lhs) == SSA_NAME)
2341 VN_INFO (lhs)->has_constants = true;
2342 else if (TREE_CODE (lhs) == SSA_NAME)
2344 /* We reset expr and constantness here because we may
2345 have been value numbering optimistically, and
2346 iterating. They may become non-constant in this case,
2347 even if they were optimistically constant. */
2349 VN_INFO (lhs)->has_constants = false;
2350 VN_INFO (lhs)->expr = NULL_TREE;
2353 if (TREE_CODE (lhs) == SSA_NAME
2354 /* We can substitute SSA_NAMEs that are live over
2355 abnormal edges with their constant value. */
2356 && !(gimple_assign_copy_p (stmt)
2357 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2359 && is_gimple_min_invariant (simplified))
2360 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2361 changed = defs_to_varying (stmt);
2362 else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
2364 changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
2366 else if (TREE_CODE (lhs) == SSA_NAME)
2368 if ((gimple_assign_copy_p (stmt)
2369 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2371 && is_gimple_min_invariant (simplified)))
2373 VN_INFO (lhs)->has_constants = true;
2375 changed = set_ssa_val_to (lhs, simplified);
2377 changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
2381 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2383 case GIMPLE_UNARY_RHS:
2384 changed = visit_unary_op (lhs, stmt);
2386 case GIMPLE_BINARY_RHS:
2387 changed = visit_binary_op (lhs, stmt);
2389 case GIMPLE_SINGLE_RHS:
2390 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2393 /* VOP-less references can go through unary case. */
2394 if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
2395 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2396 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR )
2397 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
2399 changed = visit_unary_op (lhs, stmt);
2403 case tcc_declaration:
2404 changed = visit_reference_op_load
2405 (lhs, gimple_assign_rhs1 (stmt), stmt);
2407 case tcc_expression:
2408 if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
2410 changed = visit_unary_op (lhs, stmt);
2415 changed = defs_to_varying (stmt);
2419 changed = defs_to_varying (stmt);
2425 changed = defs_to_varying (stmt);
2427 else if (is_gimple_call (stmt))
2429 tree lhs = gimple_call_lhs (stmt);
2431 /* ??? We could try to simplify calls. */
2433 if (stmt_has_constants (stmt)
2434 && TREE_CODE (lhs) == SSA_NAME)
2435 VN_INFO (lhs)->has_constants = true;
2436 else if (TREE_CODE (lhs) == SSA_NAME)
2438 /* We reset expr and constantness here because we may
2439 have been value numbering optimistically, and
2440 iterating. They may become non-constant in this case,
2441 even if they were optimistically constant. */
2442 VN_INFO (lhs)->has_constants = false;
2443 VN_INFO (lhs)->expr = NULL_TREE;
2446 if (TREE_CODE (lhs) == SSA_NAME
2447 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2448 changed = defs_to_varying (stmt);
2449 /* ??? We should handle stores from calls. */
2450 else if (TREE_CODE (lhs) == SSA_NAME)
2452 if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
2453 changed = visit_reference_op_call (lhs, stmt);
2455 changed = defs_to_varying (stmt);
2458 changed = defs_to_varying (stmt);
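
/* Dispatch sketch on a hypothetical input (for illustration only):

     x_1 = 1;           x_1's value becomes the constant 1
     y_2 = x_1 + 1;     try_to_simplify folds the RHS to 2
     z_3 = y_2;         a plain copy, handled by visit_copy

   The return value says whether a value number changed, which is what
   drives the fixpoint iteration in process_scc.  */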
/* Compare two operands by reverse postorder index.  */

static int
compare_ops (const void *pa, const void *pb)
{
  const tree opa = *((const tree *)pa);
  const tree opb = *((const tree *)pb);
  gimple opstmta = SSA_NAME_DEF_STMT (opa);
  gimple opstmtb = SSA_NAME_DEF_STMT (opb);
  basic_block bba;
  basic_block bbb;

  if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
    return 0;
  else if (gimple_nop_p (opstmta))
    return -1;
  else if (gimple_nop_p (opstmtb))
    return 1;

  bba = gimple_bb (opstmta);
  bbb = gimple_bb (opstmtb);

  if (!bba && !bbb)
    return 0;
  else if (!bba)
    return -1;
  else if (!bbb)
    return 1;

  if (bba == bbb)
    {
      if (gimple_code (opstmta) == GIMPLE_PHI
	  && gimple_code (opstmtb) == GIMPLE_PHI)
	return 0;
      else if (gimple_code (opstmta) == GIMPLE_PHI)
	return -1;
      else if (gimple_code (opstmtb) == GIMPLE_PHI)
	return 1;
      return gimple_uid (opstmta) - gimple_uid (opstmtb);
    }
  return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
}
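
/* Note the comparator contract: qsort expects a negative, zero or
   positive result.  Names without a defining statement sort first,
   then blocks in RPO order; within one block PHI results precede
   results of real statements, which compare by their UIDs.  */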
/* Sort an array containing members of a strongly connected component
   SCC so that the members are ordered by RPO number.
   This means that when the sort is complete, iterating through the
   array will give you the members in RPO order.  */

static void
sort_scc (VEC (tree, heap) *scc)
{
  qsort (VEC_address (tree, scc),
	 VEC_length (tree, scc),
	 sizeof (tree),
	 compare_ops);
}
/* Process a strongly connected component in the SSA graph.  */

static void
process_scc (VEC (tree, heap) *scc)
{
  /* If the SCC has a single member, just visit it.  */
  if (VEC_length (tree, scc) == 1)
    {
      tree use = VEC_index (tree, scc, 0);
      if (!VN_INFO (use)->use_processed)
	visit_use (use);
    }
  else
    {
      tree var;
      unsigned int i;
      unsigned int iterations = 0;
      bool changed = true;

      /* Iterate over the SCC with the optimistic table until it stops
	 changing.  */
      current_info = optimistic_info;
      while (changed)
	{
	  changed = false;
	  iterations++;
	  /* As we are value-numbering optimistically we have to
	     clear the expression tables and the simplified expressions
	     in each iteration until we converge.  */
	  htab_empty (optimistic_info->nary);
	  htab_empty (optimistic_info->phis);
	  htab_empty (optimistic_info->references);
	  obstack_free (&optimistic_info->nary_obstack, NULL);
	  gcc_obstack_init (&optimistic_info->nary_obstack);
	  empty_alloc_pool (optimistic_info->phis_pool);
	  empty_alloc_pool (optimistic_info->references_pool);
	  for (i = 0; VEC_iterate (tree, scc, i, var); i++)
	    VN_INFO (var)->expr = NULL_TREE;
	  for (i = 0; VEC_iterate (tree, scc, i, var); i++)
	    changed |= visit_use (var);
	}

      statistics_histogram_event (cfun, "SCC iterations", iterations);

      /* Finally, visit the SCC once using the valid table.  */
      current_info = valid_info;
      for (i = 0; VEC_iterate (tree, scc, i, var); i++)
	visit_use (var);
    }
}
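
/* Worked example (hypothetical): for the cycle

     i_1 = PHI <0(entry), i_2(latch)>
     i_2 = i_1 + 0;

   the first optimistic pass assumes i_1 is 0 and derives i_2 == 0; a
   second pass confirms nothing changes, so the final pass over the
   valid table records both names as equal to the constant 0.  */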
DEF_VEC_O(ssa_op_iter);
DEF_VEC_ALLOC_O(ssa_op_iter,heap);
/* Pop the components of the found SCC for NAME off the SCC stack
   and process them.  Returns true if all went well, false if
   we run into resource limits.  */

static bool
extract_and_process_scc_for_name (tree name)
{
  VEC (tree, heap) *scc = NULL;
  tree x;

  /* Found an SCC, pop the components off the SCC stack and
     process them.  */
  do
    {
      x = VEC_pop (tree, sccstack);
      VN_INFO (x)->on_sccstack = false;
      VEC_safe_push (tree, heap, scc, x);
    } while (x != name);

  /* Bail out of SCCVN in case a SCC turns out to be incredibly large.  */
  if (VEC_length (tree, scc)
      > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
    {
      if (dump_file)
	fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
		 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
		 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
      return false;
    }

  if (VEC_length (tree, scc) > 1)
    sort_scc (scc);

  if (dump_file && (dump_flags & TDF_DETAILS))
    print_scc (dump_file, scc);

  process_scc (scc);

  VEC_free (tree, heap, scc);

  return true;
}
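
/* The limit above is the user-visible --param sccvn-max-scc-size knob;
   giving up on the whole pass is preferred over spending excessive
   time iterating one huge SCC.  */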
/* Depth first search on NAME to discover and process SCC's in the SSA
   graph.
   Execution of this algorithm relies on the fact that the SCC's are
   popped off the stack in topological order.
   Returns true if successful, false if we stopped processing SCC's due
   to resource constraints.  */

static bool
DFS (tree name)
{
  VEC(ssa_op_iter, heap) *itervec = NULL;
  VEC(tree, heap) *namevec = NULL;
  use_operand_p usep = NULL;
  gimple defstmt;
  tree use;
  ssa_op_iter iter;

start_over:
  /* SCC info */
  VN_INFO (name)->dfsnum = next_dfs_num++;
  VN_INFO (name)->visited = true;
  VN_INFO (name)->low = VN_INFO (name)->dfsnum;

  VEC_safe_push (tree, heap, sccstack, name);
  VN_INFO (name)->on_sccstack = true;
  defstmt = SSA_NAME_DEF_STMT (name);

  /* Recursively DFS on our operands, looking for SCC's.  */
  if (!gimple_nop_p (defstmt))
    {
      /* Push a new iterator.  */
      if (gimple_code (defstmt) == GIMPLE_PHI)
	usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
      else
	usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
    }
  else
    clear_and_done_ssa_iter (&iter);

  while (1)
    {
      /* If we are done processing uses of a name, go up the stack
	 of iterators and process SCCs as we found them.  */
      if (op_iter_done (&iter))
	{
	  /* See if we found an SCC.  */
	  if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
	    if (!extract_and_process_scc_for_name (name))
	      {
		VEC_free (tree, heap, namevec);
		VEC_free (ssa_op_iter, heap, itervec);
		return false;
	      }

	  /* Check if we are done.  */
	  if (VEC_empty (tree, namevec))
	    {
	      VEC_free (tree, heap, namevec);
	      VEC_free (ssa_op_iter, heap, itervec);
	      return true;
	    }

	  /* Restore the last use walker and continue walking there.  */
	  use = name;
	  name = VEC_pop (tree, namevec);
	  memcpy (&iter, VEC_last (ssa_op_iter, itervec),
		  sizeof (ssa_op_iter));
	  VEC_pop (ssa_op_iter, itervec);
	  goto continue_walking;
	}

      use = USE_FROM_PTR (usep);

      /* Since we handle phi nodes, we will sometimes get
	 invariants in the use expression.  */
      if (TREE_CODE (use) == SSA_NAME)
	{
	  if (! (VN_INFO (use)->visited))
	    {
	      /* Recurse by pushing the current use walking state on
		 the stack and starting over.  */
	      VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
	      VEC_safe_push(tree, heap, namevec, name);
	      name = use;
	      goto start_over;

continue_walking:
	      VN_INFO (name)->low = MIN (VN_INFO (name)->low,
					 VN_INFO (use)->low);
	    }
	  if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
	      && VN_INFO (use)->on_sccstack)
	    {
	      VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
					 VN_INFO (name)->low);
	    }
	}

      usep = op_iter_next_use (&iter);
    }
}
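
/* This is Tarjan's SCC algorithm made iterative: namevec/itervec form
   an explicit recursion stack.  For a hypothetical cycle
   a_1 -> b_2 -> a_1, a_1 gets dfsnum 1 and b_2 dfsnum 2; the back edge
   lowers b_2's low-link to 1, and once the walk unwinds to a_1 its
   low == dfsnum, so both names are popped as a single SCC.  */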
/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table)
{
  table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
  table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
  table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
				   free_reference);

  gcc_obstack_init (&table->nary_obstack);
  table->phis_pool = create_alloc_pool ("VN phis",
					sizeof (struct vn_phi_s),
					30);
  table->references_pool = create_alloc_pool ("VN references",
					      sizeof (struct vn_reference_s),
					      30);
}
/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  htab_delete (table->phis);
  htab_delete (table->nary);
  htab_delete (table->references);
  obstack_free (&table->nary_obstack, NULL);
  free_alloc_pool (table->phis_pool);
  free_alloc_pool (table->references_pool);
}
void
init_scc_vn (void)
{
  size_t i;
  int j;
  int *rpo_numbers_temp;

  calculate_dominance_info (CDI_DOMINATORS);
  sccstack = NULL;
  constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
				      free);

  constant_value_ids = BITMAP_ALLOC (NULL);

  next_dfs_num = 1;
  next_value_id = 1;

  vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
  /* VEC_alloc doesn't actually grow it to the right size, it just
     preallocates the space to do so.  */
  VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  shared_lookup_phiargs = NULL;
  shared_lookup_vops = NULL;
  shared_lookup_references = NULL;
  rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);

  /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
     the i'th block in RPO order is bb.  We want to map bb's to RPO
     numbers, so we need to rearrange this array.  */
  for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;

  XDELETE (rpo_numbers_temp);

  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
	{
	  VN_INFO_GET (name)->valnum = VN_TOP;
	  VN_INFO (name)->expr = NULL_TREE;
	  VN_INFO (name)->value_id = 0;
	}
    }

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
}
void
free_scc_vn (void)
{
  size_t i;

  htab_delete (constant_to_value_id);
  BITMAP_FREE (constant_value_ids);
  VEC_free (tree, heap, shared_lookup_phiargs);
  VEC_free (tree, gc, shared_lookup_vops);
  VEC_free (vn_reference_op_s, heap, shared_lookup_references);
  XDELETEVEC (rpo_numbers);

  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name
	  && VN_INFO (name)->needs_insertion)
	release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);

  VEC_free (tree, heap, sccstack);
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);
}
/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  htab_iterator hi;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     tables.  */

  FOR_EACH_HTAB_ELEMENT (valid_info->nary,
			 vno, vn_nary_op_t, hi)
    {
      if (vno->result)
	{
	  if (TREE_CODE (vno->result) == SSA_NAME)
	    vno->value_id = VN_INFO (vno->result)->value_id;
	  else if (is_gimple_min_invariant (vno->result))
	    vno->value_id = get_or_alloc_constant_value_id (vno->result);
	}
    }

  FOR_EACH_HTAB_ELEMENT (valid_info->phis,
			 vp, vn_phi_t, hi)
    {
      if (vp->result)
	{
	  if (TREE_CODE (vp->result) == SSA_NAME)
	    vp->value_id = VN_INFO (vp->result)->value_id;
	  else if (is_gimple_min_invariant (vp->result))
	    vp->value_id = get_or_alloc_constant_value_id (vp->result);
	}
    }

  FOR_EACH_HTAB_ELEMENT (valid_info->references,
			 vr, vn_reference_t, hi)
    {
      if (vr->result)
	{
	  if (TREE_CODE (vr->result) == SSA_NAME)
	    vr->value_id = VN_INFO (vr->result)->value_id;
	  else if (is_gimple_min_invariant (vr->result))
	    vr->value_id = get_or_alloc_constant_value_id (vr->result);
	}
    }
}
/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  */

bool
run_scc_vn (bool may_insert_arg)
{
  size_t i;
  tree param;
  bool changed = true;

  may_insert = may_insert_arg;

  init_scc_vn ();
  current_info = valid_info;

  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = TREE_CHAIN (param))
    {
      if (gimple_default_def (cfun, param) != NULL)
	{
	  tree def = gimple_default_def (cfun, param);
	  SSA_VAL (def) = def;
	}
    }

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
	  && VN_INFO (name)->visited == false
	  && !has_zero_uses (name))
	if (!DFS (name))
	  {
	    free_scc_vn ();
	    may_insert = false;
	    return false;
	  }
    }

  /* Initialize the value ids.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
	continue;
      info = VN_INFO (name);
      if (info->valnum == name)
	info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
	info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate until they stop changing.  */
  while (changed)
    {
      changed = false;
      for (i = 1; i < num_ssa_names; ++i)
	{
	  tree name = ssa_name (i);
	  vn_ssa_aux_t info;
	  if (!name)
	    continue;
	  info = VN_INFO (name);
	  if (TREE_CODE (info->valnum) == SSA_NAME
	      && info->valnum != name
	      && info->value_id != VN_INFO (info->valnum)->value_id)
	    {
	      changed = true;
	      info->value_id = VN_INFO (info->valnum)->value_id;
	    }
	}
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);
	  if (name
	      && VN_INFO (name)->visited
	      && SSA_VAL (name) != name)
	    {
	      print_generic_expr (dump_file, name, 0);
	      fprintf (dump_file, " = ");
	      print_generic_expr (dump_file, SSA_VAL (name), 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  return true;
}
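
/* Value id propagation example (hypothetical): if a_1's valnum is b_2
   and b_2 is its own leader, b_2 receives a fresh value id in the
   first loop and the propagation loop copies it to a_1.  Iterating
   handles chains whose members appear in arbitrary order in the SSA
   name table.  */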
/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}
/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Recurse on elements of lists.  */
  if (TREE_CODE (e1) == TREE_LIST && TREE_CODE (e2) == TREE_LIST)
    {
      tree lop1, lop2;
      for (lop1 = e1, lop2 = e2;
	   lop1 || lop2;
	   lop1 = TREE_CHAIN (lop1), lop2 = TREE_CHAIN (lop2))
	{
	  if (!lop1 || !lop2)
	    return false;
	  if (!expressions_equal_p (TREE_VALUE (lop1), TREE_VALUE (lop2)))
	    return false;
	}
      return true;
    }

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}
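
/* Note: OEP_PURE_SAME lets operand_equal_p treat two calls of the same
   pure function with equal arguments as equal, which is the notion of
   equality value numbering wants here.  */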
/* Sort the VUSE array so that we can do equality comparisons
   quicker on two vuse vecs.  */

void
sort_vuses (VEC (tree,gc) *vuses)
{
  if (VEC_length (tree, vuses) > 1)
    qsort (VEC_address (tree, vuses),
	   VEC_length (tree, vuses),
	   sizeof (tree),
	   operand_build_cmp);
}

/* Sort the VUSE heap array so that we can do equality comparisons
   quicker on two vuse vecs.  */

void
sort_vuses_heap (VEC (tree,heap) *vuses)
{
  if (VEC_length (tree, vuses) > 1)
    qsort (VEC_address (tree, vuses),
	   VEC_length (tree, vuses),
	   sizeof (tree),
	   operand_build_cmp);
}
/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (type)
	       && TYPE_OVERFLOW_TRAPS (type))
	honor_trapv = true;
    }
  /* Guard against reading an uninitialized second operand for
     unary operations.  */
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
				       honor_trapv,
				       honor_nans, honor_snans, rhs2,
				       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}
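
/* Example (hypothetical): for a division _1 / _2 with a non-constant
   divisor, operation_could_trap_helper_p reports the operation as
   potentially trapping, so callers such as PRE must not insert it on
   paths where it was not originally evaluated.  */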