1 /* SCC value numbering for trees
2 Copyright (C) 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dan@dberlin.org>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "basic-block.h"
29 #include "diagnostic.h"
30 #include "tree-inline.h"
31 #include "tree-flow.h"
33 #include "tree-dump.h"
37 #include "tree-iterator.h"
39 #include "alloc-pool.h"
40 #include "tree-pass.h"
43 #include "langhooks.h"
46 #include "tree-ssa-propagate.h"
47 #include "tree-ssa-sccvn.h"
49 /* This algorithm is based on the SCC algorithm presented by Keith
50 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
51 (http://citeseer.ist.psu.edu/41805.html). In
52 straight line code, it is equivalent to a regular hash based value
53 numbering that is performed in reverse postorder.
55 For code with cycles, there are two alternatives, both of which
56 require keeping the hashtables separate from the actual list of
57 value numbers for SSA names.
59 1. Iterate value numbering in an RPO walk of the blocks, removing
60 all the entries from the hashtable after each iteration (but
61 keeping the SSA name->value number mapping between iterations).
62 Iterate until it does not change.
64 2. Perform value numbering as part of an SCC walk on the SSA graph,
65 iterating only the cycles in the SSA graph until they do not change
66 (using a separate, optimistic hashtable for value numbering the SCC
69 The second is not just faster in practice (because most SSA graph
70 cycles do not involve all the variables in the graph), it also has
73 One of these nice properties is that when we pop an SCC off the
74 stack, we are guaranteed to have processed all the operands coming from
75 *outside of that SCC*, so we do not need to do anything special to
76 ensure they have value numbers.
78 Another nice property is that the SCC walk is done as part of a DFS
79 of the SSA graph, which makes it easy to perform combining and
80 simplifying operations at the same time.
82 The code below is deliberately written in a way that makes it easy
83 to separate the SCC walk from the other work it does.
85 In order to propagate constants through the code, we track which
86 expressions contain constants, and use those while folding. In
87 theory, we could also track expressions whose value numbers are
88 replaced, in case we end up folding based on expression
91 In order to value number memory, we assign value numbers to vuses.
92 This enables us to note that, for example, stores to the same
93 address of the same value from the same starting memory states are
97 1. We can iterate only the changing portions of the SCC's, but
98 I have not seen an SCC big enough for this to be a win.
99 2. If you differentiate between phi nodes for loops and phi nodes
100 for if-then-else, you can properly consider phi nodes in different
101 blocks for equivalence.
102 3. We could value number vuses in more cases, particularly, whole
106 /* The set of hashtables and alloc_pool's for their items. */
108 typedef struct vn_tables_s
/* NOTE(review): the htab_t members (nary/phis/references hash tables) are
   referenced below (current_info->nary etc.) but their declaration lines are
   missing from this excerpt.  */
113 struct obstack nary_obstack;
114 alloc_pool phis_pool;
115 alloc_pool references_pool;
/* Map from constant tree to its value id, and the set of value ids that
   denote constants (queried by value_id_constant_p below).  */
118 static htab_t constant_to_value_id;
119 static bitmap constant_value_ids;
122 /* Valid hashtables storing information we have proven to be
125 static vn_tables_t valid_info;
127 /* Optimistic hashtables storing information we are making assumptions about
128 during iterations. */
130 static vn_tables_t optimistic_info;
132 /* Pointer to the set of hashtables that is currently being used.
133 Should always point to either the optimistic_info, or the
136 static vn_tables_t current_info;
139 /* Reverse post order index for each basic block. */
141 static int *rpo_numbers;
/* Shorthand for the value number of SSA name X.  */
143 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
145 /* This represents the top of the VN lattice, which is the universal
150 /* Unique counter for our value ids. */
152 static unsigned int next_value_id;
154 /* Next DFS number and the stack for strongly connected component
157 static unsigned int next_dfs_num;
158 static VEC (tree, heap) *sccstack;
/* When true, extra expression insertion is enabled (see the union-member
   handling in copy_reference_ops_from_ref).  */
160 static bool may_insert;
163 DEF_VEC_P(vn_ssa_aux_t);
164 DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);
166 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
167 are allocated on an obstack for locality reasons, and to free them
168 without looping over the VEC. */
170 static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
171 static struct obstack vn_ssa_aux_obstack;
173 /* Return the value numbering information for a given SSA name.
   The table is indexed by SSA_NAME_VERSION; callers must have called
   VN_INFO_GET for NAME first.  */
178 vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
179 SSA_NAME_VERSION (name));
184 /* Set the value numbering info for a given SSA name to a given
   value (replaces any existing table entry in place). */
188 VN_INFO_SET (tree name, vn_ssa_aux_t value)
190 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
191 SSA_NAME_VERSION (name), value);
194 /* Initialize the value numbering info for a given SSA name.
195 This should be called just once for every SSA name. */
198 VN_INFO_GET (tree name)
200 vn_ssa_aux_t newinfo;
/* Allocate on the obstack for locality; zero so all fields start out
   unset (valnum, expr, value_id, ...).  */
202 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
203 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
/* Grow the table on demand so SSA_NAME_VERSION is always a valid index.  */
204 if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
205 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
206 SSA_NAME_VERSION (name) + 1);
207 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
208 SSA_NAME_VERSION (name), newinfo);
213 /* Get the representative expression for the SSA_NAME NAME. Returns
214 the representative SSA_NAME if there is no expression associated with it. */
217 vn_get_expr_for (tree name)
219 vn_ssa_aux_t vn = VN_INFO (name);
221 tree expr = NULL_TREE;
/* VN_TOP means nothing is known yet; there is no representative.  */
223 if (vn->valnum == VN_TOP)
226 /* If the value-number is a constant it is the representative
228 if (TREE_CODE (vn->valnum) != SSA_NAME)
231 /* Get to the information of the value of this SSA_NAME. */
232 vn = VN_INFO (vn->valnum);
234 /* If the value-number is a constant it is the representative
236 if (TREE_CODE (vn->valnum) != SSA_NAME)
239 /* Else if we have an expression, return it. */
240 if (vn->expr != NULL_TREE)
243 /* Otherwise use the defining statement to build the expression. */
244 def_stmt = SSA_NAME_DEF_STMT (vn->valnum);
246 /* If the value number is a default-definition or a PHI result
248 if (gimple_nop_p (def_stmt)
249 || gimple_code (def_stmt) == GIMPLE_PHI)
252 if (!is_gimple_assign (def_stmt))
255 /* FIXME tuples. This is incomplete and likely will miss some
257 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
/* Reference-class codes that wrap an SSA name are rebuilt as unary
   expressions on the underlying operand.  */
260 if ((gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
261 || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
262 || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
263 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
264 expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
265 gimple_expr_type (def_stmt),
266 TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
/* Plain unary RHS.  */
270 expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
271 gimple_expr_type (def_stmt),
272 gimple_assign_rhs1 (def_stmt));
/* Binary RHS.  */
276 expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
277 gimple_expr_type (def_stmt),
278 gimple_assign_rhs1 (def_stmt),
279 gimple_assign_rhs2 (def_stmt));
284 if (expr == NULL_TREE)
287 /* Cache the expression. */
294 /* Free a phi operation structure VP.  Frees the embedded argument
   vector; the structure itself comes from an alloc_pool.  */
299 vn_phi_t phi = (vn_phi_t) vp;
300 VEC_free (tree, heap, phi->phiargs);
303 /* Free a reference operation structure VP (frees its operand vector). */
306 free_reference (void *vp)
308 vn_reference_t vr = (vn_reference_t) vp;
309 VEC_free (vn_reference_op_s, heap, vr->operands);
312 /* Hash table equality function for vn_constant_t. */
315 vn_constant_eq (const void *p1, const void *p2)
317 const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
318 const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;
/* Cheap early-out: different hash codes can never be equal.  */
320 if (vc1->hashcode != vc2->hashcode)
323 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
326 /* Hash table hash function for vn_constant_t (returns the cached hash). */
329 vn_constant_hash (const void *p1)
331 const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
332 return vc1->hashcode;
335 /* Lookup a value id for CONSTANT and return it. If it does not
   exist, the lookup uses NO_INSERT so nothing is created.  */
339 get_constant_value_id (tree constant)
342 struct vn_constant_s vc;
344 vc.hashcode = vn_hash_constant_with_type (constant);
345 vc.constant = constant;
346 slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
347 vc.hashcode, NO_INSERT);
349 return ((vn_constant_t)*slot)->value_id;
353 /* Lookup a value id for CONSTANT, and if it does not exist, create a
354 new one and return it. If it does exist, return it. */
357 get_or_alloc_constant_value_id (tree constant)
/* NOTE(review): VC appears to leak when the constant is already present
   (the early return below); lines freeing it may be missing from this
   excerpt -- confirm against the full file.  */
360 vn_constant_t vc = XNEW (struct vn_constant_s);
362 vc->hashcode = vn_hash_constant_with_type (constant);
363 vc->constant = constant;
364 slot = htab_find_slot_with_hash (constant_to_value_id, vc,
365 vc->hashcode, INSERT);
/* Existing entry: return its id.  */
369 return ((vn_constant_t)*slot)->value_id;
/* New entry: assign a fresh id and record it as a constant id.  */
371 vc->value_id = get_next_value_id ();
373 bitmap_set_bit (constant_value_ids, vc->value_id);
377 /* Return true if V is a value id for a constant. */
380 value_id_constant_p (unsigned int v)
382 return bitmap_bit_p (constant_value_ids, v);
385 /* Compare two reference operands P1 and P2 for equality. Return true if
386 they are equal, and false otherwise. */
389 vn_reference_op_eq (const void *p1, const void *p2)
391 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
392 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
/* Equality requires matching opcode, compatible types and equal
   (possibly NULL) operands.  */
394 return vro1->opcode == vro2->opcode
395 && types_compatible_p (vro1->type, vro2->type)
396 && expressions_equal_p (vro1->op0, vro2->op0)
397 && expressions_equal_p (vro1->op1, vro2->op1)
398 && expressions_equal_p (vro1->op2, vro2->op2);
401 /* Compute the hash for a reference operand VRO1 by combining the
   hashes of its (present) operands, seeded with the opcode. */
404 vn_reference_op_compute_hash (const vn_reference_op_t vro1)
406 hashval_t result = 0;
408 result += iterative_hash_expr (vro1->op0, vro1->opcode);
410 result += iterative_hash_expr (vro1->op1, vro1->opcode);
412 result += iterative_hash_expr (vro1->op2, vro1->opcode);
416 /* Return the hashcode for a given reference operation P1 (cached). */
419 vn_reference_hash (const void *p1)
421 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
422 return vr1->hashcode;
425 /* Compute a hash for the reference operation VR1 and return it.
   Combines the vuse hash with the hash of every operand.  */
428 vn_reference_compute_hash (const vn_reference_t vr1)
432 vn_reference_op_t vro;
434 result = iterative_hash_expr (vr1->vuse, 0);
435 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
436 result += vn_reference_op_compute_hash (vro);
441 /* Return true if reference operations P1 and P2 are equivalent. This
442 means they have the same set of operands and vuses. */
445 vn_reference_eq (const void *p1, const void *p2)
448 vn_reference_op_t vro;
450 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
451 const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
/* NOTE(review): the hashcode comparison appears twice (here and just
   below); the second one is redundant.  */
452 if (vr1->hashcode != vr2->hashcode)
455 /* Early out if this is not a hash collision. */
456 if (vr1->hashcode != vr2->hashcode)
459 /* The VOP needs to be the same. */
460 if (vr1->vuse != vr2->vuse)
463 /* If the operands are the same we are done. */
464 if (vr1->operands == vr2->operands)
467 /* We require that address operands be canonicalized in a way that
468 two memory references will have the same operands if they are
470 if (VEC_length (vn_reference_op_s, vr1->operands)
471 != VEC_length (vn_reference_op_s, vr2->operands))
/* Element-wise comparison of the operand vectors.  */
474 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
475 if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
482 /* Copy the operations present in load/store REF into RESULT, a vector of
483 vn_reference_op_s's. */
486 copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
/* TARGET_MEM_REF gets special handling: it is flattened into two ops,
   one for the TMR itself (index/step/offset) and one for its base.  */
488 if (TREE_CODE (ref) == TARGET_MEM_REF)
490 vn_reference_op_s temp;
493 base = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
495 base = build_int_cst (ptr_type_node, 0);
497 memset (&temp, 0, sizeof (temp));
498 /* We do not care for spurious type qualifications. */
499 temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
500 temp.opcode = TREE_CODE (ref);
501 temp.op0 = TMR_INDEX (ref);
502 temp.op1 = TMR_STEP (ref);
503 temp.op2 = TMR_OFFSET (ref);
504 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
/* Second op records the base and the original reference.  */
506 memset (&temp, 0, sizeof (temp));
507 temp.type = NULL_TREE;
508 temp.opcode = TREE_CODE (base);
510 temp.op1 = TMR_ORIGINAL (ref);
511 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
515 /* For non-calls, store the information that makes up the address.
   This walks from the outermost reference inwards, pushing one op per
   handled-component level.  */
519 vn_reference_op_s temp;
521 memset (&temp, 0, sizeof (temp));
522 /* We do not care for spurious type qualifications. */
523 temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
524 temp.opcode = TREE_CODE (ref);
528 case ALIGN_INDIRECT_REF:
530 /* The only operand is the address, which gets its own
531 vn_reference_op_s structure. */
533 case MISALIGNED_INDIRECT_REF:
534 temp.op0 = TREE_OPERAND (ref, 1);
537 /* Record bits and position. */
538 temp.op0 = TREE_OPERAND (ref, 1);
539 temp.op1 = TREE_OPERAND (ref, 2);
542 /* The field decl is enough to unambiguously specify the field,
543 a matching type is not necessary and a mismatching type
544 is always a spurious difference. */
545 temp.type = NULL_TREE;
546 /* If this is a reference to a union member, record the union
547 member size as operand. Do so only if we are doing
548 expression insertion (during FRE), as PRE currently gets
549 confused with this. */
551 && TREE_OPERAND (ref, 2) == NULL_TREE
552 && TREE_CODE (DECL_CONTEXT (TREE_OPERAND (ref, 1))) == UNION_TYPE
553 && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (ref, 1)))
554 && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1))))
555 temp.op0 = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)));
558 /* Record field as operand. */
559 temp.op0 = TREE_OPERAND (ref, 1);
560 temp.op1 = TREE_OPERAND (ref, 2);
563 case ARRAY_RANGE_REF:
565 /* Record index as operand. */
566 temp.op0 = TREE_OPERAND (ref, 1)
567 temp.op1 = TREE_OPERAND (ref, 2);
568 temp.op2 = TREE_OPERAND (ref, 3);
586 if (is_gimple_min_invariant (ref))
592 /* These are only interesting for their operands, their
593 existence, and their type. They will never be the last
594 ref in the chain of references (IE they require an
595 operand), so we don't have to put anything
596 for op* as it will be handled by the iteration */
599 case VIEW_CONVERT_EXPR:
604 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
/* Descend into the inner reference (or the object an ADDR_EXPR of a
   non-invariant takes the address of) and continue the walk.  */
606 if (REFERENCE_CLASS_P (ref)
607 || (TREE_CODE (ref) == ADDR_EXPR
608 && !is_gimple_min_invariant (ref)))
609 ref = TREE_OPERAND (ref, 0);
615 /* Re-create a reference tree from the reference ops OPS.
616 Returns NULL_TREE if the ops were not handled.
617 This routine needs to be kept in sync with copy_reference_ops_from_ref. */
620 get_ref_from_reference_ops (VEC(vn_reference_op_s, heap) *ops)
622 vn_reference_op_t op;
/* OP0_P always points at the slot in the partially-built tree that the
   next (inner) component should fill.  */
624 tree ref, *op0_p = &ref;
626 for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
633 case ALIGN_INDIRECT_REF:
635 *op0_p = build1 (op->opcode, op->type, NULL_TREE);
636 op0_p = &TREE_OPERAND (*op0_p, 0);
639 case MISALIGNED_INDIRECT_REF:
640 *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type,
642 op0_p = &TREE_OPERAND (*op0_p, 0);
646 *op0_p = build3 (BIT_FIELD_REF, op->type, NULL_TREE,
648 op0_p = &TREE_OPERAND (*op0_p, 0);
/* COMPONENT_REF: the type comes from the field decl in op0, matching
   the NULL_TREE type recorded by copy_reference_ops_from_ref.  */
652 *op0_p = build3 (COMPONENT_REF, TREE_TYPE (op->op0), NULL_TREE,
654 op0_p = &TREE_OPERAND (*op0_p, 0);
657 case ARRAY_RANGE_REF:
659 *op0_p = build4 (op->opcode, op->type, NULL_TREE,
660 op->op0, op->op1, op->op2);
661 op0_p = &TREE_OPERAND (*op0_p, 0);
/* Leaf declarations/constants terminate the chain.  */
681 if (op->op0 != NULL_TREE)
683 gcc_assert (is_gimple_min_invariant (op->op0));
690 case VIEW_CONVERT_EXPR:
691 *op0_p = build1 (op->opcode, op->type, NULL_TREE);
692 op0_p = &TREE_OPERAND (*op0_p, 0);
703 /* Copy the operations present in load/store/call REF into RESULT, a vector of
704 vn_reference_op_s's. */
707 copy_reference_ops_from_call (gimple call,
708 VEC(vn_reference_op_s, heap) **result)
710 vn_reference_op_s temp;
713 /* Copy the type, opcode, function being called and static chain. */
714 memset (&temp, 0, sizeof (temp));
715 temp.type = gimple_call_return_type (call);
716 temp.opcode = CALL_EXPR;
717 temp.op0 = gimple_call_fn (call);
718 temp.op1 = gimple_call_chain (call);
719 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
721 /* Copy the call arguments. As they can be references as well,
722 just chain them together. */
723 for (i = 0; i < gimple_call_num_args (call); ++i)
725 tree callarg = gimple_call_arg (call, i);
726 copy_reference_ops_from_ref (callarg, result);
730 /* Create a vector of vn_reference_op_s structures from REF, a
731 REFERENCE_CLASS_P tree. The vector is not shared (caller owns and
   must free it). */
733 static VEC(vn_reference_op_s, heap) *
734 create_reference_ops_from_ref (tree ref)
736 VEC (vn_reference_op_s, heap) *result = NULL;
738 copy_reference_ops_from_ref (ref, &result);
742 /* Create a vector of vn_reference_op_s structures from CALL, a
743 call statement. The vector is not shared. */
745 static VEC(vn_reference_op_s, heap) *
746 create_reference_ops_from_call (gimple call)
748 VEC (vn_reference_op_s, heap) *result = NULL;
750 copy_reference_ops_from_call (call, &result);
754 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
755 *I_P to point to the last element of the replacement. */
757 vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
760 VEC(vn_reference_op_s, heap) *mem = NULL;
761 vn_reference_op_t op;
762 unsigned int i = *i_p;
765 /* Get ops for the addressed object. */
766 op = VEC_index (vn_reference_op_s, *ops, i);
767 /* ??? If this is our usual typeof &ARRAY vs. &ARRAY[0] problem, work
768 around it to avoid later ICEs. */
769 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op->op0, 0))) == ARRAY_TYPE
770 && TREE_CODE (TREE_TYPE (TREE_TYPE (op->op0))) != ARRAY_TYPE)
772 vn_reference_op_s aref;
/* Synthesize an ARRAY_REF of the first element, honoring a non-zero
   lower bound when the array's domain provides one.  */
774 aref.type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (op->op0)));
775 aref.opcode = ARRAY_REF;
776 aref.op0 = integer_zero_node;
777 if ((dom = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (op->op0, 0))))
778 && TYPE_MIN_VALUE (dom))
779 aref.op0 = TYPE_MIN_VALUE (dom);
780 aref.op1 = NULL_TREE;
781 aref.op2 = NULL_TREE;
782 VEC_safe_push (vn_reference_op_s, heap, mem, &aref);
784 copy_reference_ops_from_ref (TREE_OPERAND (op->op0, 0), &mem);
786 /* Do the replacement - we should have at least one op in mem now.
   The two ops being replaced are the INDIRECT_REF at i-1 and the
   ADDR_EXPR at i; MEM's ops are spliced in their place.  */
787 if (VEC_length (vn_reference_op_s, mem) == 1)
789 VEC_replace (vn_reference_op_s, *ops, i - 1,
790 VEC_index (vn_reference_op_s, mem, 0));
791 VEC_ordered_remove (vn_reference_op_s, *ops, i);
794 else if (VEC_length (vn_reference_op_s, mem) == 2)
796 VEC_replace (vn_reference_op_s, *ops, i - 1,
797 VEC_index (vn_reference_op_s, mem, 0));
798 VEC_replace (vn_reference_op_s, *ops, i,
799 VEC_index (vn_reference_op_s, mem, 1));
801 else if (VEC_length (vn_reference_op_s, mem) > 2)
803 VEC_replace (vn_reference_op_s, *ops, i - 1,
804 VEC_index (vn_reference_op_s, mem, 0));
805 VEC_replace (vn_reference_op_s, *ops, i,
806 VEC_index (vn_reference_op_s, mem, 1));
807 /* ??? There is no VEC_splice. */
808 for (j = 2; VEC_iterate (vn_reference_op_s, mem, j, op); j++)
809 VEC_safe_insert (vn_reference_op_s, heap, *ops, ++i, op);
814 VEC_free (vn_reference_op_s, heap, mem);
818 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
819 structures into their value numbers. This is done in-place, and
820 the vector passed in is returned. */
822 static VEC (vn_reference_op_s, heap) *
823 valueize_refs (VEC (vn_reference_op_s, heap) *orig)
825 vn_reference_op_t vro;
828 for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
830 if (vro->opcode == SSA_NAME
831 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
833 vro->op0 = SSA_VAL (vro->op0);
834 /* If it transforms from an SSA_NAME to a constant, update
836 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
837 vro->opcode = TREE_CODE (vro->op0);
838 /* If it transforms from an SSA_NAME to an address, fold with
839 a preceding indirect reference. */
840 if (i > 0 && TREE_CODE (vro->op0) == ADDR_EXPR
841 && VEC_index (vn_reference_op_s,
842 orig, i - 1)->opcode == INDIRECT_REF)
844 vn_reference_fold_indirect (&orig, &i);
/* op1/op2 only ever hold SSA names directly; valueize them too.  */
848 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
849 vro->op1 = SSA_VAL (vro->op1);
850 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
851 vro->op2 = SSA_VAL (vro->op2);
/* Scratch vector reused by the two helpers below to avoid an
   allocation per lookup; never freed between calls, only truncated.  */
857 static VEC(vn_reference_op_s, heap) *shared_lookup_references;
859 /* Create a vector of vn_reference_op_s structures from REF, a
860 REFERENCE_CLASS_P tree. The vector is shared among all callers of
   this function, so it must not be kept across calls. */
863 static VEC(vn_reference_op_s, heap) *
864 valueize_shared_reference_ops_from_ref (tree ref)
868 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
869 copy_reference_ops_from_ref (ref, &shared_lookup_references);
870 shared_lookup_references = valueize_refs (shared_lookup_references);
871 return shared_lookup_references;
874 /* Create a vector of vn_reference_op_s structures from CALL, a
875 call statement. The vector is shared among all callers of
   this function. */
878 static VEC(vn_reference_op_s, heap) *
879 valueize_shared_reference_ops_from_call (gimple call)
883 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
884 copy_reference_ops_from_call (call, &shared_lookup_references);
885 shared_lookup_references = valueize_refs (shared_lookup_references);
886 return shared_lookup_references;
889 /* Lookup a SCCVN reference operation VR in the current hash table.
890 Returns the resulting value number if it exists in the hash table,
891 NULL_TREE otherwise. VNRESULT will be filled in with the actual
892 vn_reference_t stored in the hashtable if something is found. */
895 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
901 slot = htab_find_slot_with_hash (current_info->references, vr,
/* During optimistic iteration, fall back to the proven (valid) table
   when the optimistic one has no entry.  */
903 if (!slot && current_info == optimistic_info)
904 slot = htab_find_slot_with_hash (valid_info->references, vr,
909 *vnresult = (vn_reference_t)*slot;
910 return ((vn_reference_t)*slot)->result;
916 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
917 with the current VUSE and performs the expression lookup. */
920 vn_reference_lookup_2 (tree op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
922 vn_reference_t vr = (vn_reference_t)vr_;
926 /* Fixup vuse and hash.  Subtract the old vuse's hash contribution,
   swap in the valueized new vuse, and add its contribution -- keeps
   the cached hashcode consistent without a full recompute.  */
927 vr->hashcode = vr->hashcode - iterative_hash_expr (vr->vuse, 0);
928 vr->vuse = SSA_VAL (vuse);
929 vr->hashcode = vr->hashcode + iterative_hash_expr (vr->vuse, 0);
932 slot = htab_find_slot_with_hash (current_info->references, vr,
934 if (!slot && current_info == optimistic_info)
935 slot = htab_find_slot_with_hash (valid_info->references, vr,
943 /* Lookup a reference operation by it's parts, in the current hash table.
944 Returns the resulting value number if it exists in the hash table,
945 NULL_TREE otherwise. VNRESULT will be filled in with the actual
946 vn_reference_t stored in the hashtable if something is found. */
949 vn_reference_lookup_pieces (tree vuse,
950 VEC (vn_reference_op_s, heap) *operands,
951 vn_reference_t *vnresult, bool maywalk)
953 struct vn_reference_s vr1;
960 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
961 vr1.operands = valueize_refs (operands);
962 vr1.hashcode = vn_reference_compute_hash (&vr1);
963 vn_reference_lookup_1 (&vr1, vnresult);
/* On miss, optionally walk the virtual use-def chain looking for a
   non-aliasing vuse under which the reference is known.  */
969 tree ref = get_ref_from_reference_ops (operands);
973 (vn_reference_t)walk_non_aliased_vuses (ref, vr1.vuse,
974 vn_reference_lookup_2, &vr1);
978 return (*vnresult)->result;
983 /* Lookup OP in the current hash table, and return the resulting value
984 number if it exists in the hash table. Return NULL_TREE if it does
985 not exist in the hash table or if the result field of the structure
986 was NULL.. VNRESULT will be filled in with the vn_reference_t
987 stored in the hashtable if one exists. */
990 vn_reference_lookup (tree op, tree vuse, bool maywalk,
991 vn_reference_t *vnresult)
993 struct vn_reference_s vr1;
998 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
999 vr1.operands = valueize_shared_reference_ops_from_ref (op);
1000 vr1.hashcode = vn_reference_compute_hash (&vr1);
1005 vn_reference_t wvnresult;
1007 (vn_reference_t)walk_non_aliased_vuses (op, vr1.vuse,
1008 vn_reference_lookup_2, &vr1);
1012 *vnresult = wvnresult;
1013 return wvnresult->result;
1019 return vn_reference_lookup_1 (&vr1, vnresult);
1023 /* Insert OP into the current hash table with a value number of
1024 RESULT, and return the resulting reference structure we created. */
1027 vn_reference_insert (tree op, tree result, tree vuse)
1032 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
/* SSA results reuse their existing value id; constants get (or
   allocate) a constant value id.  */
1033 if (TREE_CODE (result) == SSA_NAME)
1034 vr1->value_id = VN_INFO (result)->value_id;
1036 vr1->value_id = get_or_alloc_constant_value_id (result);
1037 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1038 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
1039 vr1->hashcode = vn_reference_compute_hash (vr1);
1040 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
1042 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1045 /* Because we lookup stores using vuses, and value number failures
1046 using the vdefs (see visit_reference_op_store for how and why),
1047 it's possible that on failure we may try to insert an already
1048 inserted store. This is not wrong, there is no ssa name for a
1049 store that we could use as a differentiator anyway. Thus, unlike
1050 the other lookup functions, you cannot gcc_assert (!*slot)
1053 /* But free the old slot in case of a collision. */
1055 free_reference (*slot);
1061 /* Insert a reference by it's pieces into the current hash table with
1062 a value number of RESULT. Return the resulting reference
1063 structure we created. */
1066 vn_reference_insert_pieces (tree vuse,
1067 VEC (vn_reference_op_s, heap) *operands,
1068 tree result, unsigned int value_id)
1074 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1075 vr1->value_id = value_id;
1076 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1077 vr1->operands = valueize_refs (operands);
1078 vr1->hashcode = vn_reference_compute_hash (vr1);
1079 if (result && TREE_CODE (result) == SSA_NAME)
1080 result = SSA_VAL (result);
1081 vr1->result = result;
1083 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1086 /* At this point we should have all the things inserted that we have
1087 seen before, and we should never try inserting something that
1089 gcc_assert (!*slot);
/* NOTE(review): free_reference after an assert that *slot is empty
   looks contradictory; intervening (checking-conditional) lines appear
   to be missing from this excerpt -- confirm against the full file.  */
1091 free_reference (*slot);
1097 /* Compute and return the hash value for nary operation VBO1.
   Note this VALUEIZES and CANONICALIZES VNO1 in place: operands are
   replaced by their value numbers and commutative operands swapped
   into canonical order before hashing.  */
1100 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
1105 for (i = 0; i < vno1->length; ++i)
1106 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
1107 vno1->op[i] = SSA_VAL (vno1->op[i]);
/* Canonical operand order for commutative binary ops so that a+b and
   b+a hash (and compare) the same.  */
1109 if (vno1->length == 2
1110 && commutative_tree_code (vno1->opcode)
1111 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
1113 tree temp = vno1->op[0];
1114 vno1->op[0] = vno1->op[1];
1118 for (i = 0; i < vno1->length; ++i)
1119 hash += iterative_hash_expr (vno1->op[i], vno1->opcode);
1124 /* Return the computed hashcode for nary operation P1 (cached). */
1127 vn_nary_op_hash (const void *p1)
1129 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
1130 return vno1->hashcode;
1133 /* Compare nary operations P1 and P2 and return true if they are
   equivalent: same hash, opcode, compatible type, equal operands.  */
1137 vn_nary_op_eq (const void *p1, const void *p2)
1139 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
1140 const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
1143 if (vno1->hashcode != vno2->hashcode)
1146 if (vno1->opcode != vno2->opcode
1147 || !types_compatible_p (vno1->type, vno2->type))
1150 for (i = 0; i < vno1->length; ++i)
1151 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
1157 /* Lookup a n-ary operation by its pieces and return the resulting value
1158 number if it exists in the hash table. Return NULL_TREE if it does
1159 not exist in the hash table or if the result field of the operation
1160 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
1164 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
1165 tree type, tree op0, tree op1, tree op2,
1166 tree op3, vn_nary_op_t *vnresult)
1169 struct vn_nary_op_s vno1;
1173 vno1.length = length;
1179 vno1.hashcode = vn_nary_op_compute_hash (&vno1);
1180 slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
/* Fall back to the proven table during optimistic iteration.  */
1182 if (!slot && current_info == optimistic_info)
1183 slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
1188 *vnresult = (vn_nary_op_t)*slot;
1189 return ((vn_nary_op_t)*slot)->result;
1192 /* Lookup OP in the current hash table, and return the resulting value
1193 number if it exists in the hash table. Return NULL_TREE if it does
1194 not exist in the hash table or if the result field of the operation
1195 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
1199 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
1202 struct vn_nary_op_s vno1;
1207 vno1.opcode = TREE_CODE (op);
1208 vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
1209 vno1.type = TREE_TYPE (op);
1210 for (i = 0; i < vno1.length; ++i)
1211 vno1.op[i] = TREE_OPERAND (op, i);
1212 vno1.hashcode = vn_nary_op_compute_hash (&vno1);
1213 slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
1215 if (!slot && current_info == optimistic_info)
1216 slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
1221 *vnresult = (vn_nary_op_t)*slot;
1222 return ((vn_nary_op_t)*slot)->result;
1225 /* Lookup the rhs of STMT in the current hash table, and return the resulting
1226 value number if it exists in the hash table. Return NULL_TREE if
1227 it does not exist in the hash table. VNRESULT will contain the
1228 vn_nary_op_t from the hashtable if it exists. */
1231 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
1234 struct vn_nary_op_s vno1;
1239 vno1.opcode = gimple_assign_rhs_code (stmt);
1240 vno1.length = gimple_num_ops (stmt) - 1;
1241 vno1.type = gimple_expr_type (stmt);
1242 for (i = 0; i < vno1.length; ++i)
1243 vno1.op[i] = gimple_op (stmt, i + 1);
/* These reference-like unary codes wrap their real operand; strip the
   wrapper so lookups match vn_nary_op_insert_stmt below.  */
1244 if (vno1.opcode == REALPART_EXPR
1245 || vno1.opcode == IMAGPART_EXPR
1246 || vno1.opcode == VIEW_CONVERT_EXPR)
1247 vno1.op[0] = TREE_OPERAND (vno1.op[0], 0);
1248 vno1.hashcode = vn_nary_op_compute_hash (&vno1);
1249 slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
1251 if (!slot && current_info == optimistic_info)
1252 slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
1257 *vnresult = (vn_nary_op_t)*slot;
1258 return ((vn_nary_op_t)*slot)->result;
1261 /* Insert a n-ary operation into the current hash table using it's
1262 pieces. Return the vn_nary_op_t structure we created and put in
1266 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
1267 tree type, tree op0,
1268 tree op1, tree op2, tree op3,
1270 unsigned int value_id)
/* The op[] array is declared with 4 slots; allocate only enough of the
   struct to hold LENGTH operands.  */
1275 vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
1276 (sizeof (struct vn_nary_op_s)
1277 - sizeof (tree) * (4 - length)));
1278 vno1->value_id = value_id;
1279 vno1->opcode = code;
1280 vno1->length = length;
1290 vno1->result = result;
1291 vno1->hashcode = vn_nary_op_compute_hash (vno1);
1292 slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
/* Unlike reference inserts, n-ary inserts must never collide.  */
1294 gcc_assert (!*slot);
1301 /* Insert OP into the current hash table with a value number of
1302 RESULT. Return the vn_nary_op_t structure we created and put in
1306 vn_nary_op_insert (tree op, tree result)
1308 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
1313 vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
1314 (sizeof (struct vn_nary_op_s)
1315 - sizeof (tree) * (4 - length)));
1316 vno1->value_id = VN_INFO (result)->value_id;
1317 vno1->opcode = TREE_CODE (op);
1318 vno1->length = length;
1319 vno1->type = TREE_TYPE (op);
1320 for (i = 0; i < vno1->length; ++i)
1321 vno1->op[i] = TREE_OPERAND (op, i);
1322 vno1->result = result;
1323 vno1->hashcode = vn_nary_op_compute_hash (vno1);
1324 slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
1326 gcc_assert (!*slot);
1332 /* Insert the rhs of STMT into the current hash table with a value number of
   RESULT; mirrors vn_nary_op_lookup_stmt above (including the
   REALPART/IMAGPART/VIEW_CONVERT operand stripping).  */
1336 vn_nary_op_insert_stmt (gimple stmt, tree result)
1338 unsigned length = gimple_num_ops (stmt) - 1;
1343 vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
1344 (sizeof (struct vn_nary_op_s)
1345 - sizeof (tree) * (4 - length)));
1346 vno1->value_id = VN_INFO (result)->value_id;
1347 vno1->opcode = gimple_assign_rhs_code (stmt);
1348 vno1->length = length;
1349 vno1->type = gimple_expr_type (stmt);
1350 for (i = 0; i < vno1->length; ++i)
1351 vno1->op[i] = gimple_op (stmt, i + 1);
1352 if (vno1->opcode == REALPART_EXPR
1353 || vno1->opcode == IMAGPART_EXPR
1354 || vno1->opcode == VIEW_CONVERT_EXPR)
1355 vno1->op[0] = TREE_OPERAND (vno1->op[0], 0);
1356 vno1->result = result;
1357 vno1->hashcode = vn_nary_op_compute_hash (vno1);
1358 slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
1360 gcc_assert (!*slot);
/* NOTE(review): numbered-listing fragment; leading integers are original
   file line numbers and interior lines (declarations of TYPE/I/PHI1OP,
   braces, the final return) are missing.  Tokens kept verbatim.  */
1366 /* Compute a hashcode for PHI operation VP1 and return it. */
1368 static inline hashval_t
1369 vn_phi_compute_hash (vn_phi_t vp1)
1371 hashval_t result = 0;
/* Seed the hash with the basic-block index of the PHI.  */
1376 result = vp1->block->index;
1378 /* If all PHI arguments are constants we need to distinguish
1379 the PHI node via its type. */
1380 type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
1381 result += (INTEGRAL_TYPE_P (type)
1382 + (INTEGRAL_TYPE_P (type)
1383 ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));
/* Mix in each argument; VN_TOP arguments are treated specially
   (skipped -- line dropped in this listing) so they compare equal
   to anything in vn_phi_eq.  */
1385 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
1387 if (phi1op == VN_TOP)
1389 result += iterative_hash_expr (phi1op, result);
/* NOTE(review): numbered-listing fragment; braces and the return-type
   line are missing.  Tokens kept verbatim.  */
1395 /* Return the computed hashcode for phi operation P1. */
1398 vn_phi_hash (const void *p1)
/* Hash is precomputed by vn_phi_compute_hash and cached in the entry.  */
1400 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
1401 return vp1->hashcode;
/* NOTE(review): numbered-listing fragment; braces, early returns and
   declarations are missing.  Tokens kept verbatim.  */
1404 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
1407 vn_phi_eq (const void *p1, const void *p2)
1409 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
1410 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
/* Cheap rejects first: cached hashcode, then defining block.  */
1412 if (vp1->hashcode != vp2->hashcode)
1415 if (vp1->block == vp2->block)
1420 /* If the PHI nodes do not have compatible types
1421 they are not the same. */
1422 if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
1423 TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
1426 /* Any phi in the same block will have it's arguments in the
1427 same edge order, because of how we store phi nodes. */
1428 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
1430 tree phi2op = VEC_index (tree, vp2->phiargs, i);
/* VN_TOP is equivalent to everything, so it never causes a mismatch.  */
1431 if (phi1op == VN_TOP || phi2op == VN_TOP)
1433 if (!expressions_equal_p (phi1op, phi2op))
/* Scratch vector reused across vn_phi_lookup calls to avoid
   reallocating the argument list on every lookup.  */
1441 static VEC(tree, heap) *shared_lookup_phiargs;
/* NOTE(review): numbered-listing fragment; declarations, NO_INSERT
   arguments and the NULL_TREE miss-return are missing.  Tokens kept
   verbatim.  */
1443 /* Lookup PHI in the current hash table, and return the resulting
1444 value number if it exists in the hash table. Return NULL_TREE if
1445 it does not exist in the hash table. */
1448 vn_phi_lookup (gimple phi)
1451 struct vn_phi_s vp1;
1454 VEC_truncate (tree, shared_lookup_phiargs, 0);
1456 /* Canonicalize the SSA_NAME's to their value number. */
1457 for (i = 0; i < gimple_phi_num_args (phi); i++)
1459 tree def = PHI_ARG_DEF (phi, i);
1460 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
1461 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
1463 vp1.phiargs = shared_lookup_phiargs;
1464 vp1.block = gimple_bb (phi);
1465 vp1.hashcode = vn_phi_compute_hash (&vp1);
1466 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
/* Optimistic misses fall back to the valid table.  */
1468 if (!slot && current_info == optimistic_info)
1469 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
1473 return ((vn_phi_t)*slot)->result;
/* NOTE(review): numbered-listing fragment; the *slot assignment and the
   final return are among the missing lines.  Tokens kept verbatim.  */
1476 /* Insert PHI into the current hash table with a value number of
1480 vn_phi_insert (gimple phi, tree result)
1483 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
1485 VEC (tree, heap) *args = NULL;
1487 /* Canonicalize the SSA_NAME's to their value number. */
1488 for (i = 0; i < gimple_phi_num_args (phi); i++)
1490 tree def = PHI_ARG_DEF (phi, i);
1491 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
1492 VEC_safe_push (tree, heap, args, def);
1494 vp1->value_id = VN_INFO (result)->value_id;
1495 vp1->phiargs = args;
1496 vp1->block = gimple_bb (phi);
1497 vp1->result = result;
1498 vp1->hashcode = vn_phi_compute_hash (vp1);
1500 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
1503 /* Because we iterate over phi operations more than once, it's
1504 possible the slot might already exist here, hence no assert.*/
/* NOTE(review): numbered-listing fragment; declarations and braces
   missing.  Tokens kept verbatim.  */
1510 /* Print set of components in strongly connected component SCC to OUT. */
1513 print_scc (FILE *out, VEC (tree, heap) *scc)
1518 fprintf (out, "SCC consists of: ");
/* Dump each SSA name in the SCC, space separated.  */
1519 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1521 print_generic_expr (out, var, 0);
1524 fprintf (out, "\n");
/* NOTE(review): numbered-listing fragment; the VN_TOP alternative in the
   assert, the abnormal-PHI early path and the final returns are among
   the missing lines.  Tokens kept verbatim.  */
1527 /* Set the value number of FROM to TO, return true if it has changed
1531 set_ssa_val_to (tree from, tree to)
1536 && TREE_CODE (to) == SSA_NAME
1537 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
1540 /* The only thing we allow as value numbers are VN_TOP, ssa_names
1541 and invariants. So assert that here. */
1542 gcc_assert (to != NULL_TREE
1544 || TREE_CODE (to) == SSA_NAME
1545 || is_gimple_min_invariant (to)));
1547 if (dump_file && (dump_flags & TDF_DETAILS))
1549 fprintf (dump_file, "Setting value number of ");
1550 print_generic_expr (dump_file, from, 0);
1551 fprintf (dump_file, " to ");
1552 print_generic_expr (dump_file, to, 0);
1555 currval = SSA_VAL (from);
/* Only record (and report) a change when the new value differs from
   the current one.  */
1557 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
1559 VN_INFO (from)->valnum = to;
1560 if (dump_file && (dump_flags & TDF_DETAILS))
1561 fprintf (dump_file, " (changed)\n");
1564 if (dump_file && (dump_flags & TDF_DETAILS))
1565 fprintf (dump_file, "\n");
/* NOTE(review): numbered-listing fragment; iterator declarations and the
   return of CHANGED are missing.  Tokens kept verbatim.  */
1569 /* Set all definitions in STMT to value number to themselves.
1570 Return true if a value number changed. */
1573 defs_to_varying (gimple stmt)
1575 bool changed = false;
/* Value number every def of STMT to itself, i.e. mark it varying.  */
1579 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
1581 tree def = DEF_FROM_PTR (defp);
1583 VN_INFO (def)->use_processed = true;
1584 changed |= set_ssa_val_to (def, def);
/* Forward declarations for helpers defined later in the file.  */
1589 static bool expr_has_constants (tree expr);
1590 static tree valueize_expr (tree expr);
/* NOTE(review): numbered-listing fragment; braces are missing.  Tokens
   kept verbatim.  */
1592 /* Visit a copy between LHS and RHS, return true if the value number
1596 visit_copy (tree lhs, tree rhs)
1598 /* Follow chains of copies to their destination. */
1599 while (TREE_CODE (rhs) == SSA_NAME
1600 && SSA_VAL (rhs) != rhs)
1601 rhs = SSA_VAL (rhs);
1603 /* The copy may have a more interesting constant filled expression
1604 (we don't, since we know our RHS is just an SSA name). */
1605 if (TREE_CODE (rhs) == SSA_NAME)
1607 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
1608 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
1611 return set_ssa_val_to (lhs, rhs);
/* NOTE(review): numbered-listing fragment; the if/else structure and the
   return of CHANGED are partially missing.  Tokens kept verbatim.  */
1614 /* Visit a unary operator RHS, value number it, and return true if the
1615 value number of LHS has changed as a result. */
1618 visit_unary_op (tree lhs, gimple stmt)
1620 bool changed = false;
1621 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
/* On a hit, LHS gets the previously recorded value; on a miss, LHS
   numbers to itself and the operation is entered into the table.  */
1625 changed = set_ssa_val_to (lhs, result);
1629 changed = set_ssa_val_to (lhs, lhs);
1630 vn_nary_op_insert_stmt (stmt, lhs);
/* NOTE(review): numbered-listing fragment; the if/else structure and the
   return of CHANGED are partially missing.  Tokens kept verbatim.  */
1636 /* Visit a binary operator RHS, value number it, and return true if the
1637 value number of LHS has changed as a result. */
1640 visit_binary_op (tree lhs, gimple stmt)
1642 bool changed = false;
1643 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
/* Same hit/miss protocol as visit_unary_op.  */
1647 changed = set_ssa_val_to (lhs, result);
1651 changed = set_ssa_val_to (lhs, lhs);
1652 vn_nary_op_insert_stmt (stmt, lhs);
/* NOTE(review): numbered-listing fragment; declarations (RESULT, VR2,
   SLOT), branch structure and returns are partially missing.  Tokens
   kept verbatim.  */
1658 /* Visit a call STMT storing into LHS. Return true if the value number
1659 of the LHS has changed as a result. */
1662 visit_reference_op_call (tree lhs, gimple stmt)
1664 bool changed = false;
1665 struct vn_reference_s vr1;
1667 tree vuse = gimple_vuse (stmt);
/* Key the lookup on the value number of the incoming memory state.  */
1669 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1670 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
1671 vr1.hashcode = vn_reference_compute_hash (&vr1);
1672 result = vn_reference_lookup_1 (&vr1, NULL);
1675 changed = set_ssa_val_to (lhs, result);
1676 if (TREE_CODE (result) == SSA_NAME
1677 && VN_INFO (result)->has_constants)
1678 VN_INFO (lhs)->has_constants = true;
/* Miss: number LHS to itself and insert a fresh reference entry.  */
1684 changed = set_ssa_val_to (lhs, lhs);
1685 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
1686 vr2->vuse = vr1.vuse;
1687 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
1688 vr2->hashcode = vr1.hashcode;
1690 slot = htab_find_slot_with_hash (current_info->references,
1691 vr2, vr2->hashcode, INSERT);
1693 free_reference (*slot);
/* NOTE(review): numbered-listing fragment; braces, several else-arms and
   returns are missing.  Tokens kept verbatim.  */
1700 /* Visit a load from a reference operator RHS, part of STMT, value number it,
1701 and return true if the value number of the LHS has changed as a result. */
1704 visit_reference_op_load (tree lhs, tree op, gimple stmt)
1706 bool changed = false;
1707 tree result = vn_reference_lookup (op, gimple_vuse (stmt), true, NULL);
1709 /* We handle type-punning through unions by value-numbering based
1710 on offset and size of the access. Be prepared to handle a
1711 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
1713 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
1715 /* We will be setting the value number of lhs to the value number
1716 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
1717 So first simplify and lookup this expression to see if it
1718 is already available. */
1719 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
/* Try to fold the conversion against the expression VAL's operand
   stands for, so chained conversions collapse.  */
1720 if ((CONVERT_EXPR_P (val)
1721 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
1722 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
1724 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
1725 if ((CONVERT_EXPR_P (tem)
1726 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
1727 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
1728 TREE_TYPE (val), tem)))
1732 if (!is_gimple_min_invariant (val)
1733 && TREE_CODE (val) != SSA_NAME)
1734 result = vn_nary_op_lookup (val, NULL);
1735 /* If the expression is not yet available, value-number lhs to
1736 a new SSA_NAME we create. */
1737 if (!result && may_insert)
1739 result = make_ssa_name (SSA_NAME_VAR (lhs), NULL);
1740 /* Initialize value-number information properly. */
1741 VN_INFO_GET (result)->valnum = result;
1742 VN_INFO (result)->value_id = get_next_value_id ();
1743 VN_INFO (result)->expr = val;
1744 VN_INFO (result)->has_constants = expr_has_constants (val);
1745 VN_INFO (result)->needs_insertion = true;
1746 /* As all "inserted" statements are singleton SCCs, insert
1747 to the valid table. This is strictly needed to
1748 avoid re-generating new value SSA_NAMEs for the same
1749 expression during SCC iteration over and over (the
1750 optimistic table gets cleared after each iteration).
1751 We do not need to insert into the optimistic table, as
1752 lookups there will fall back to the valid table. */
1753 if (current_info == optimistic_info)
1755 current_info = valid_info;
1756 vn_nary_op_insert (val, result);
1757 current_info = optimistic_info;
1760 vn_nary_op_insert (val, result);
1761 if (dump_file && (dump_flags & TDF_DETAILS))
1763 fprintf (dump_file, "Inserting name ");
1764 print_generic_expr (dump_file, result, 0);
1765 fprintf (dump_file, " for expression ");
1766 print_generic_expr (dump_file, val, 0);
1767 fprintf (dump_file, "\n");
/* Hit: propagate value number and constant-ness to LHS.  */
1774 changed = set_ssa_val_to (lhs, result);
1775 if (TREE_CODE (result) == SSA_NAME
1776 && VN_INFO (result)->has_constants)
1778 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
1779 VN_INFO (lhs)->has_constants = true;
/* Miss: number LHS to itself and record the load.  */
1784 changed = set_ssa_val_to (lhs, lhs);
1785 vn_reference_insert (op, lhs, gimple_vuse (stmt));
/* NOTE(review): numbered-listing fragment; declarations (RESULT, VDEF,
   DEF, USE), braces and the return of CHANGED are missing.  Tokens kept
   verbatim.  */
1792 /* Visit a store to a reference operator LHS, part of STMT, value number it,
1793 and return true if the value number of the LHS has changed as a result. */
1796 visit_reference_op_store (tree lhs, tree op, gimple stmt)
1798 bool changed = false;
1800 bool resultsame = false;
1802 /* First we want to lookup using the *vuses* from the store and see
1803 if there the last store to this location with the same address
1806 The vuses represent the memory state before the store. If the
1807 memory state, address, and value of the store is the same as the
1808 last store to this location, then this store will produce the
1809 same memory state as that store.
1811 In this case the vdef versions for this store are value numbered to those
1812 vuse versions, since they represent the same memory state after
1815 Otherwise, the vdefs for the store are used when inserting into
1816 the table, since the store generates a new memory state. */
1818 result = vn_reference_lookup (lhs, gimple_vuse (stmt), false, NULL);
1822 if (TREE_CODE (result) == SSA_NAME)
1823 result = SSA_VAL (result);
1824 if (TREE_CODE (op) == SSA_NAME)
1826 resultsame = expressions_equal_p (result, op);
/* No earlier identical store found: treat this as a new memory state.  */
1829 if (!result || !resultsame)
1833 if (dump_file && (dump_flags & TDF_DETAILS))
1835 fprintf (dump_file, "No store match\n");
1836 fprintf (dump_file, "Value numbering store ");
1837 print_generic_expr (dump_file, lhs, 0);
1838 fprintf (dump_file, " to ");
1839 print_generic_expr (dump_file, op, 0);
1840 fprintf (dump_file, "\n");
1842 /* Have to set value numbers before insert, since insert is
1843 going to valueize the references in-place. */
1844 if ((vdef = gimple_vdef (stmt)))
1846 VN_INFO (vdef)->use_processed = true;
1847 changed |= set_ssa_val_to (vdef, vdef);
1850 /* Do not insert structure copies into the tables. */
1851 if (is_gimple_min_invariant (op)
1852 || is_gimple_reg (op))
1853 vn_reference_insert (lhs, op, vdef);
1857 /* We had a match, so value number the vdef to have the value
1858 number of the vuse it came from. */
1861 if (dump_file && (dump_flags & TDF_DETAILS))
1862 fprintf (dump_file, "Store matched earlier value,"
1863 "value numbering store vdefs to matching vuses.\n");
1865 def = gimple_vdef (stmt);
1866 use = gimple_vuse (stmt);
1868 VN_INFO (def)->use_processed = true;
1869 changed |= set_ssa_val_to (def, SSA_VAL (use));
/* NOTE(review): numbered-listing fragment; ALLSAME bookkeeping, braces
   and some returns are missing.  Tokens kept verbatim.  */
1875 /* Visit and value number PHI, return true if the value number
1879 visit_phi (gimple phi)
1881 bool changed = false;
1883 tree sameval = VN_TOP;
1884 bool allsame = true;
1887 /* TODO: We could check for this in init_sccvn, and replace this
1888 with a gcc_assert. */
1889 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
1890 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
1892 /* See if all non-TOP arguments have the same value. TOP is
1893 equivalent to everything, so we can ignore it. */
1894 for (i = 0; i < gimple_phi_num_args (phi); i++)
1896 tree def = PHI_ARG_DEF (phi, i);
1898 if (TREE_CODE (def) == SSA_NAME)
1899 def = SSA_VAL (def);
1902 if (sameval == VN_TOP)
1908 if (!expressions_equal_p (def, sameval))
1916 /* If all value numbered to the same value, the phi node has that
1920 if (is_gimple_min_invariant (sameval))
1922 VN_INFO (PHI_RESULT (phi))->has_constants = true;
1923 VN_INFO (PHI_RESULT (phi))->expr = sameval;
1927 VN_INFO (PHI_RESULT (phi))->has_constants = false;
1928 VN_INFO (PHI_RESULT (phi))->expr = sameval;
1931 if (TREE_CODE (sameval) == SSA_NAME)
1932 return visit_copy (PHI_RESULT (phi), sameval);
1934 return set_ssa_val_to (PHI_RESULT (phi), sameval);
1937 /* Otherwise, see if it is equivalent to a phi node in this block. */
1938 result = vn_phi_lookup (phi);
1941 if (TREE_CODE (result) == SSA_NAME)
1942 changed = visit_copy (PHI_RESULT (phi), result);
1944 changed = set_ssa_val_to (PHI_RESULT (phi), result);
/* No equivalent PHI: record this one and number it to itself.  */
1948 vn_phi_insert (phi, PHI_RESULT (phi));
1949 VN_INFO (PHI_RESULT (phi))->has_constants = false;
1950 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
1951 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
/* NOTE(review): numbered-listing fragment; the case labels for
   tcc_unary/tcc_binary and the default return are missing.  Tokens kept
   verbatim.  */
1957 /* Return true if EXPR contains constants. */
1960 expr_has_constants (tree expr)
1962 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
/* Unary: check the sole operand.  */
1965 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
/* Binary: check both operands.  */
1968 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
1969 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
1970 /* Constants inside reference ops are rarely interesting, but
1971 it can take a lot of looking to find them. */
1973 case tcc_declaration:
1976 return is_gimple_min_invariant (expr);
/* NOTE(review): numbered-listing fragment; the early return for
   non-assigns and the default case are missing.  Tokens kept verbatim.  */
1981 /* Return true if STMT contains constants. */
1984 stmt_has_constants (gimple stmt)
/* Only assignments carry an rhs we can inspect here.  */
1986 if (gimple_code (stmt) != GIMPLE_ASSIGN)
1989 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
1991 case GIMPLE_UNARY_RHS:
1992 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
1994 case GIMPLE_BINARY_RHS:
1995 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
1996 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
1997 case GIMPLE_SINGLE_RHS:
1998 /* Constants inside reference ops are rarely interesting, but
1999 it can take a lot of looking to find them. */
2000 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
/* NOTE(review): numbered-listing fragment; the tcc_unary/tcc_binary case
   labels and the return of EXPR are missing.  Tokens kept verbatim.  */
2007 /* Replace SSA_NAMES in expr with their value numbers, and return the
2009 This is performed in place. */
2012 valueize_expr (tree expr)
2014 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
/* Unary: valueize the sole operand unless its value is VN_TOP.  */
2017 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2018 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2019 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
/* Binary: valueize both operands.  */
2022 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2023 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2024 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2025 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
2026 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
2027 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
/* NOTE(review): numbered-listing fragment; braces, the NULL_TREE
   early-return and the final return are missing.  Tokens kept verbatim.  */
2035 /* Simplify the binary expression RHS, and return the result if
2039 simplify_binary_expression (gimple stmt)
2041 tree result = NULL_TREE;
2042 tree op0 = gimple_assign_rhs1 (stmt);
2043 tree op1 = gimple_assign_rhs2 (stmt);
2045 /* This will not catch every single case we could combine, but will
2046 catch those with constants. The goal here is to simultaneously
2047 combine constants between expressions, but avoid infinite
2048 expansion of expressions during simplification. */
2049 if (TREE_CODE (op0) == SSA_NAME)
2051 if (VN_INFO (op0)->has_constants
2052 || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
2053 op0 = valueize_expr (vn_get_expr_for (op0));
2054 else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
2055 op0 = SSA_VAL (op0);
2058 if (TREE_CODE (op1) == SSA_NAME)
2060 if (VN_INFO (op1)->has_constants)
2061 op1 = valueize_expr (vn_get_expr_for (op1));
2062 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
2063 op1 = SSA_VAL (op1);
2066 /* Avoid folding if nothing changed. */
2067 if (op0 == gimple_assign_rhs1 (stmt)
2068 && op1 == gimple_assign_rhs2 (stmt))
/* Folding may emit overflow warnings; defer them so they are only
   reported when the folded result is actually usable.  */
2071 fold_defer_overflow_warnings ();
2073 result = fold_binary (gimple_assign_rhs_code (stmt),
2074 gimple_expr_type (stmt), op0, op1);
2076 STRIP_USELESS_TYPE_CONVERSION (result);
2078 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
2081 /* Make sure result is not a complex expression consisting
2082 of operators of operators (IE (a + b) + (a + c))
2083 Otherwise, we will end up with unbounded expressions if
2084 fold does anything at all. */
2085 if (result && valid_gimple_rhs_p (result))
/* NOTE(review): numbered-listing fragment; braces, the ORIG_OP0
   assignment and the final returns are missing.  Tokens kept verbatim.  */
2091 /* Simplify the unary expression RHS, and return the result if
2095 simplify_unary_expression (gimple stmt)
2097 tree result = NULL_TREE;
2098 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
2100 /* We handle some tcc_reference codes here that are all
2101 GIMPLE_ASSIGN_SINGLE codes. */
2102 if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
2103 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2104 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2105 op0 = TREE_OPERAND (op0, 0);
2107 if (TREE_CODE (op0) != SSA_NAME)
2111 if (VN_INFO (op0)->has_constants)
2112 op0 = valueize_expr (vn_get_expr_for (op0));
2113 else if (gimple_assign_cast_p (stmt)
2114 || gimple_assign_rhs_code (stmt) == REALPART_EXPR
2115 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2116 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2118 /* We want to do tree-combining on conversion-like expressions.
2119 Make sure we feed only SSA_NAMEs or constants to fold though. */
2120 tree tem = valueize_expr (vn_get_expr_for (op0));
2121 if (UNARY_CLASS_P (tem)
2122 || BINARY_CLASS_P (tem)
2123 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
2124 || TREE_CODE (tem) == SSA_NAME
2125 || is_gimple_min_invariant (tem))
2129 /* Avoid folding if nothing changed, but remember the expression. */
2130 if (op0 == orig_op0)
2133 result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
2134 gimple_expr_type (stmt), op0);
2137 STRIP_USELESS_TYPE_CONVERSION (result);
2138 if (valid_gimple_rhs_p (result))
/* NOTE(review): numbered-listing fragment; case labels (tcc_reference,
   tcc_unary, tcc_binary), TEM checks and the default return are missing.
   Tokens kept verbatim.  */
2145 /* Try to simplify RHS using equivalences and constant folding. */
2148 try_to_simplify (gimple stmt)
2152 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
2153 in this case, there is no point in doing extra work. */
2154 if (gimple_assign_copy_p (stmt)
2155 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2158 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2160 case tcc_declaration:
/* A declaration rhs may have a known constant initializer.  */
2161 tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
2167 /* Do not do full-blown reference lookup here, but simplify
2168 reads from constant aggregates. */
2169 tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
2173 /* Fallthrough for some codes that can operate on registers. */
2174 if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
2175 || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
2176 || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
2178 /* We could do a little more with unary ops, if they expand
2179 into binary ops, but it's debatable whether it is worth it. */
2181 return simplify_unary_expression (stmt);
2183 case tcc_comparison:
2185 return simplify_binary_expression (stmt);
/* NOTE(review): numbered-listing fragment of the central statement
   dispatcher; many structural lines (braces, case labels, else-arms,
   the return of CHANGED) are missing.  Too intricate and too
   order-dependent for a safe rewrite from this fragment -- tokens kept
   verbatim, comments added only.  */
2194 /* Visit and value number USE, return true if the value number
2198 visit_use (tree use)
2200 bool changed = false;
2201 gimple stmt = SSA_NAME_DEF_STMT (use);
2203 VN_INFO (use)->use_processed = true;
2205 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
2206 if (dump_file && (dump_flags & TDF_DETAILS)
2207 && !SSA_NAME_IS_DEFAULT_DEF (use))
2209 fprintf (dump_file, "Value numbering ");
2210 print_generic_expr (dump_file, use, 0);
2211 fprintf (dump_file, " stmt = ");
2212 print_gimple_stmt (dump_file, stmt, 0, 0);
2215 /* Handle uninitialized uses. */
2216 if (SSA_NAME_IS_DEFAULT_DEF (use))
2217 changed = set_ssa_val_to (use, use);
/* Dispatch by statement kind: PHIs, un-numberable statements,
   assignments, then calls.  */
2220 if (gimple_code (stmt) == GIMPLE_PHI)
2221 changed = visit_phi (stmt);
2222 else if (!gimple_has_lhs (stmt)
2223 || gimple_has_volatile_ops (stmt)
2224 || stmt_could_throw_p (stmt))
2225 changed = defs_to_varying (stmt);
2226 else if (is_gimple_assign (stmt))
2228 tree lhs = gimple_assign_lhs (stmt);
2231 /* Shortcut for copies. Simplifying copies is pointless,
2232 since we copy the expression and value they represent. */
2233 if (gimple_assign_copy_p (stmt)
2234 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2235 && TREE_CODE (lhs) == SSA_NAME)
2237 changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
2240 simplified = try_to_simplify (stmt);
2243 if (dump_file && (dump_flags & TDF_DETAILS))
2245 fprintf (dump_file, "RHS ");
2246 print_gimple_expr (dump_file, stmt, 0, 0);
2247 fprintf (dump_file, " simplified to ");
2248 print_generic_expr (dump_file, simplified, 0);
2249 if (TREE_CODE (lhs) == SSA_NAME)
2250 fprintf (dump_file, " has constants %d\n",
2251 expr_has_constants (simplified));
2253 fprintf (dump_file, "\n");
2256 /* Setting value numbers to constants will occasionally
2257 screw up phi congruence because constants are not
2258 uniquely associated with a single ssa name that can be
2261 && is_gimple_min_invariant (simplified)
2262 && TREE_CODE (lhs) == SSA_NAME)
2264 VN_INFO (lhs)->expr = simplified;
2265 VN_INFO (lhs)->has_constants = true;
2266 changed = set_ssa_val_to (lhs, simplified);
2270 && TREE_CODE (simplified) == SSA_NAME
2271 && TREE_CODE (lhs) == SSA_NAME)
2273 changed = visit_copy (lhs, simplified);
2276 else if (simplified)
2278 if (TREE_CODE (lhs) == SSA_NAME)
2280 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
2281 /* We have to unshare the expression or else
2282 valuizing may change the IL stream. */
2283 VN_INFO (lhs)->expr = unshare_expr (simplified);
2286 else if (stmt_has_constants (stmt)
2287 && TREE_CODE (lhs) == SSA_NAME)
2288 VN_INFO (lhs)->has_constants = true;
2289 else if (TREE_CODE (lhs) == SSA_NAME)
2291 /* We reset expr and constantness here because we may
2292 have been value numbering optimistically, and
2293 iterating. They may become non-constant in this case,
2294 even if they were optimistically constant. */
2296 VN_INFO (lhs)->has_constants = false;
2297 VN_INFO (lhs)->expr = NULL_TREE;
2300 if ((TREE_CODE (lhs) == SSA_NAME
2301 /* We can substitute SSA_NAMEs that are live over
2302 abnormal edges with their constant value. */
2303 && !(gimple_assign_copy_p (stmt)
2304 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2306 && is_gimple_min_invariant (simplified))
2307 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2308 /* Stores or copies from SSA_NAMEs that are live over
2309 abnormal edges are a problem. */
2310 || (gimple_assign_single_p (stmt)
2311 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2312 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt))))
2313 changed = defs_to_varying (stmt);
2314 else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
2316 changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
2318 else if (TREE_CODE (lhs) == SSA_NAME)
2320 if ((gimple_assign_copy_p (stmt)
2321 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2323 && is_gimple_min_invariant (simplified)))
2325 VN_INFO (lhs)->has_constants = true;
2327 changed = set_ssa_val_to (lhs, simplified);
2329 changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
/* Non-copy assignments: route by rhs class.  */
2333 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2335 case GIMPLE_UNARY_RHS:
2336 changed = visit_unary_op (lhs, stmt);
2338 case GIMPLE_BINARY_RHS:
2339 changed = visit_binary_op (lhs, stmt);
2341 case GIMPLE_SINGLE_RHS:
2342 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2345 /* VOP-less references can go through unary case. */
2346 if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
2347 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2348 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR )
2349 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
2351 changed = visit_unary_op (lhs, stmt);
2355 case tcc_declaration:
2356 changed = visit_reference_op_load
2357 (lhs, gimple_assign_rhs1 (stmt), stmt);
2359 case tcc_expression:
2360 if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
2362 changed = visit_unary_op (lhs, stmt);
2367 changed = defs_to_varying (stmt);
2371 changed = defs_to_varying (stmt);
2377 changed = defs_to_varying (stmt);
2379 else if (is_gimple_call (stmt))
2381 tree lhs = gimple_call_lhs (stmt);
2383 /* ??? We could try to simplify calls. */
2385 if (stmt_has_constants (stmt)
2386 && TREE_CODE (lhs) == SSA_NAME)
2387 VN_INFO (lhs)->has_constants = true;
2388 else if (TREE_CODE (lhs) == SSA_NAME)
2390 /* We reset expr and constantness here because we may
2391 have been value numbering optimistically, and
2392 iterating. They may become non-constant in this case,
2393 even if they were optimistically constant. */
2394 VN_INFO (lhs)->has_constants = false;
2395 VN_INFO (lhs)->expr = NULL_TREE;
2398 if (TREE_CODE (lhs) == SSA_NAME
2399 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2400 changed = defs_to_varying (stmt);
2401 /* ??? We should handle stores from calls. */
2402 else if (TREE_CODE (lhs) == SSA_NAME)
2404 if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
2405 changed = visit_reference_op_call (lhs, stmt);
2407 changed = defs_to_varying (stmt);
2410 changed = defs_to_varying (stmt);
/* NOTE(review): numbered-listing fragment; basic_block declarations,
   return values for some branches and braces are missing.  Tokens kept
   verbatim.  */
2417 /* Compare two operands by reverse postorder index */
2420 compare_ops (const void *pa, const void *pb)
2422 const tree opa = *((const tree *)pa);
2423 const tree opb = *((const tree *)pb);
2424 gimple opstmta = SSA_NAME_DEF_STMT (opa);
2425 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
/* Names without defining statements sort by SSA version number.  */
2429 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
2430 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2431 else if (gimple_nop_p (opstmta))
2433 else if (gimple_nop_p (opstmtb))
2436 bba = gimple_bb (opstmta);
2437 bbb = gimple_bb (opstmtb);
2440 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
/* Within the same block, PHIs precede ordinary statements; the latter
   are ordered by their uids.  */
2448 if (gimple_code (opstmta) == GIMPLE_PHI
2449 && gimple_code (opstmtb) == GIMPLE_PHI)
2450 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2451 else if (gimple_code (opstmta) == GIMPLE_PHI)
2453 else if (gimple_code (opstmtb) == GIMPLE_PHI)
2455 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
2456 return gimple_uid (opstmta) - gimple_uid (opstmtb);
2458 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
/* Different blocks: order by reverse-postorder number.  */
2460 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
/* NOTE(review): numbered-listing fragment; the element-size and
   comparator arguments of qsort are among the missing lines.  Tokens
   kept verbatim.  */
2463 /* Sort an array containing members of a strongly connected component
2464 SCC so that the members are ordered by RPO number.
2465 This means that when the sort is complete, iterating through the
2466 array will give you the members in RPO order. */
2469 sort_scc (VEC (tree, heap) *scc)
2471 qsort (VEC_address (tree, scc),
2472 VEC_length (tree, scc),
/* NOTE(review): numbered-listing fragment; declarations (I, VAR), the
   do/while structure of the iteration loop and braces are missing.
   Tokens kept verbatim.  */
2477 /* Process a strongly connected component in the SSA graph. */
2480 process_scc (VEC (tree, heap) *scc)
2482 /* If the SCC has a single member, just visit it. */
2484 if (VEC_length (tree, scc) == 1)
2486 tree use = VEC_index (tree, scc, 0);
2487 if (!VN_INFO (use)->use_processed)
2494 unsigned int iterations = 0;
2495 bool changed = true;
2497 /* Iterate over the SCC with the optimistic table until it stops
2499 current_info = optimistic_info;
2504 /* As we are value-numbering optimistically we have to
2505 clear the expression tables and the simplified expressions
2506 in each iteration until we converge. */
2507 htab_empty (optimistic_info->nary);
2508 htab_empty (optimistic_info->phis);
2509 htab_empty (optimistic_info->references);
/* The nary table allocates from an obstack; recycle it wholesale.  */
2510 obstack_free (&optimistic_info->nary_obstack, NULL);
2511 gcc_obstack_init (&optimistic_info->nary_obstack);
2512 empty_alloc_pool (optimistic_info->phis_pool);
2513 empty_alloc_pool (optimistic_info->references_pool);
2514 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2515 VN_INFO (var)->expr = NULL_TREE;
2516 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2517 changed |= visit_use (var);
2520 statistics_histogram_event (cfun, "SCC iterations", iterations);
2522 /* Finally, visit the SCC once using the valid table. */
2523 current_info = valid_info;
2524 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
/* Instantiate VEC support for ssa_op_iter, used by the iterative DFS
   below to keep a stack of iterators.  */
2529 DEF_VEC_O(ssa_op_iter);
2530 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
/* NOTE(review): numbered-listing fragment; the do-loop head, dump_file
   guard and the true/false returns are missing.  Tokens kept verbatim.  */
2532 /* Pop the components of the found SCC for NAME off the SCC stack
2533 and process them. Returns true if all went well, false if
2534 we run into resource limits. */
2537 extract_and_process_scc_for_name (tree name)
2539 VEC (tree, heap) *scc = NULL;
2542 /* Found an SCC, pop the components off the SCC stack and
2546 x = VEC_pop (tree, sccstack);
2548 VN_INFO (x)->on_sccstack = false;
2549 VEC_safe_push (tree, heap, scc, x);
2550 } while (x != name);
2552 /* Bail out of SCCVN in case a SCC turns out to be incredibly large. */
2553 if (VEC_length (tree, scc)
2554 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
2557 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
2558 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
2559 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
/* Multi-member SCCs must be sorted (RPO) and iterated; singletons go
   straight through process_scc.  */
2563 if (VEC_length (tree, scc) > 1)
2566 if (dump_file && (dump_flags & TDF_DETAILS))
2567 print_scc (dump_file, scc);
2571 VEC_free (tree, heap, scc);
2576 /* Depth first search on NAME to discover and process SCC's in the SSA
2578 Execution of this algorithm relies on the fact that the SCC's are
2579 popped off the stack in topological order.
2580 Returns true if successful, false if we stopped processing SCC's due
2581 to resource constraints. */
/* Iterative Tarjan-style DFS over the SSA use-def graph starting at NAME
   (see the comment above: SCCs pop off the stack in topological order).
   Recursion is simulated with two parallel stacks: NAMEVEC holds the
   name being walked at each level and ITERVEC the saved operand
   iterator.  NOTE(review): this extract omits the function signature and
   several lines (the enclosing loops, labels such as continue_walking,
   and various returns), so control flow between the visible statements
   is partly inferred — confirm against the full source.  */
2586 VEC(ssa_op_iter, heap) *itervec = NULL;
2587 VEC(tree, heap) *namevec = NULL;
2588 use_operand_p usep = NULL;
/* Standard Tarjan bookkeeping: assign a DFS number, mark visited, and
   start LOW at the node's own dfsnum.  */
2595 VN_INFO (name)->dfsnum = next_dfs_num++;
2596 VN_INFO (name)->visited = true;
2597 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
2599 VEC_safe_push (tree, heap, sccstack, name);
2600 VN_INFO (name)->on_sccstack = true;
2601 defstmt = SSA_NAME_DEF_STMT (name);
2603 /* Recursively DFS on our operands, looking for SCC's. */
2604 if (!gimple_nop_p (defstmt))
2606 /* Push a new iterator. */
2607 if (gimple_code (defstmt) == GIMPLE_PHI)
2608 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES)
2610 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
/* A nop def-stmt has no uses to walk; mark the iterator done.  */
2613 clear_and_done_ssa_iter (&iter);
2617 /* If we are done processing uses of a name, go up the stack
2618 of iterators and process SCCs as we found them. */
2619 if (op_iter_done (&iter))
2621 /* See if we found an SCC. */
/* NAME is an SCC root exactly when its low-link equals its dfsnum.  */
2622 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
2623 if (!extract_and_process_scc_for_name (name))
/* Resource limit hit: free the walker stacks and bail out.  */
2625 VEC_free (tree, heap, namevec);
2626 VEC_free (ssa_op_iter, heap, itervec);
2630 /* Check if we are done. */
2631 if (VEC_empty (tree, namevec))
2633 VEC_free (tree, heap, namevec);
2634 VEC_free (ssa_op_iter, heap, itervec);
2638 /* Restore the last use walker and continue walking there. */
2640 name = VEC_pop (tree, namevec);
2641 memcpy (&iter, VEC_last (ssa_op_iter, itervec),
2642 sizeof (ssa_op_iter));
2643 VEC_pop (ssa_op_iter, itervec);
2644 goto continue_walking;
2647 use = USE_FROM_PTR (usep);
2649 /* Since we handle phi nodes, we will sometimes get
2650 invariants in the use expression. */
2651 if (TREE_CODE (use) == SSA_NAME)
2653 if (! (VN_INFO (use)->visited))
2655 /* Recurse by pushing the current use walking state on
2656 the stack and starting over. */
2657 VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
2658 VEC_safe_push(tree, heap, namevec, name);
/* After the simulated recursion returns, propagate the child's
   low-link up to NAME.  */
2663 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
2664 VN_INFO (use)->low);
/* Back-edge to a node still on the SCC stack: take its dfsnum.  */
2666 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
2667 && VN_INFO (use)->on_sccstack)
2669 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
2670 VN_INFO (name)->low);
2674 usep = op_iter_next_use (&iter);
2678 /* Allocate a value number table. */
/* Allocate the hash tables, obstack, and allocation pools that back one
   value-number table TABLE (used for both valid_info and
   optimistic_info).  NOTE(review): some lines are elided here — the
   htab_create free-function argument for references, the pool block
   counts, and the closing brace are not visible.  */
2681 allocate_vn_table (vn_tables_t table)
2683 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
2684 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
2685 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
/* N-ary ops are obstack-allocated; phis and references come from
   dedicated alloc-pools (see process_scc, which empties all three).  */
2688 gcc_obstack_init (&table->nary_obstack);
2689 table->phis_pool = create_alloc_pool ("VN phis",
2690 sizeof (struct vn_phi_s),
2692 table->references_pool = create_alloc_pool ("VN references",
2693 sizeof (struct vn_reference_s),
2697 /* Free a value number table. */
/* Release everything allocate_vn_table set up for TABLE: the three hash
   tables, the nary obstack, and both alloc-pools.  (TABLE itself is
   freed by the caller — see the XDELETE calls in the teardown code
   below.)  */
2700 free_vn_table (vn_tables_t table)
2702 htab_delete (table->phis);
2703 htab_delete (table->nary);
2704 htab_delete (table->references);
2705 obstack_free (&table->nary_obstack, NULL);
2706 free_alloc_pool (table->phis_pool);
2707 free_alloc_pool (table->references_pool);
/* Initialize SCCVN global state: dominance info, the constant value-id
   table, per-SSA-name VN_INFO records, basic-block RPO numbers, VN_TOP,
   and the valid/optimistic tables.  NOTE(review): the function signature
   is elided in this extract (presumably init_scc_vn), as are several
   declarations (i, j) and initializations (sccstack, next_dfs_num,
   next_value_id) — confirm against the full source.  */
2715 int *rpo_numbers_temp;
2717 calculate_dominance_info (CDI_DOMINATORS);
2719 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
2722 constant_value_ids = BITMAP_ALLOC (NULL);
2727 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
2728 /* VEC_alloc doesn't actually grow it to the right size, it just
2729 preallocates the space to do so. */
2730 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
2731 gcc_obstack_init (&vn_ssa_aux_obstack);
2733 shared_lookup_phiargs = NULL;
2734 shared_lookup_references = NULL;
2735 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
2736 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
2737 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
2739 /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
2740 the i'th block in RPO order is bb. We want to map bb's to RPO
2741 numbers, so we need to rearrange this array. */
2742 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
2743 rpo_numbers[rpo_numbers_temp[j]] = j;
2745 XDELETE (rpo_numbers_temp);
/* VN_TOP is the lattice "undefined" value every SSA name starts at.  */
2747 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
2749 /* Create the VN_INFO structures, and initialize value numbers to
2751 for (i = 0; i < num_ssa_names; i++)
2753 tree name = ssa_name (i);
2756 VN_INFO_GET (name)->valnum = VN_TOP;
2757 VN_INFO (name)->expr = NULL_TREE;
2758 VN_INFO (name)->value_id = 0;
2762 renumber_gimple_stmt_uids ();
2764 /* Create the valid and optimistic value numbering tables. */
2765 valid_info = XCNEW (struct vn_tables_s);
2766 allocate_vn_table (valid_info);
2767 optimistic_info = XCNEW (struct vn_tables_s);
2768 allocate_vn_table (optimistic_info);
/* Tear down all SCCVN global state created by the initialization code
   above, releasing any SSA names SCCVN itself inserted.  NOTE(review):
   the function signature is elided in this extract (presumably
   free_scc_vn), as are the declaration of I and part of the condition
   guarding release_ssa_name — confirm against the full source.  */
2776 htab_delete (constant_to_value_id);
2777 BITMAP_FREE (constant_value_ids);
2778 VEC_free (tree, heap, shared_lookup_phiargs);
2779 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
2780 XDELETEVEC (rpo_numbers);
/* Release SSA names that SCCVN created (needs_insertion) rather than
   ones that came from the function body.  */
2782 for (i = 0; i < num_ssa_names; i++)
2784 tree name = ssa_name (i);
2786 && VN_INFO (name)->needs_insertion)
2787 release_ssa_name (name);
/* The vn_ssa_aux records live on this obstack; free it in one shot.  */
2789 obstack_free (&vn_ssa_aux_obstack, NULL);
2790 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
2792 VEC_free (tree, heap, sccstack);
2793 free_vn_table (valid_info);
2794 XDELETE (valid_info);
2795 free_vn_table (optimistic_info);
2796 XDELETE (optimistic_info);
2799 /* Set the value ids in the valid hash tables. */
/* Walk the three valid_info hash tables (nary, phis, references) and
   fill in each entry's value_id from its result: SSA names take their
   VN_INFO value id, invariants take (or allocate) a constant value id.
   NOTE(review): lines are elided here — the function header/braces,
   local declarations (vno, vp, vr, hi), and the iterator variable for
   the phis loop are not visible.  */
2802 set_hashtable_value_ids (void)
2809 /* Now set the value ids of the things we had put in the hash
2812 FOR_EACH_HTAB_ELEMENT (valid_info->nary,
2813 vno, vn_nary_op_t, hi)
2817 if (TREE_CODE (vno->result) == SSA_NAME)
2818 vno->value_id = VN_INFO (vno->result)->value_id;
2819 else if (is_gimple_min_invariant (vno->result))
2820 vno->value_id = get_or_alloc_constant_value_id (vno->result);
2824 FOR_EACH_HTAB_ELEMENT (valid_info->phis,
2829 if (TREE_CODE (vp->result) == SSA_NAME)
2830 vp->value_id = VN_INFO (vp->result)->value_id;
2831 else if (is_gimple_min_invariant (vp->result))
2832 vp->value_id = get_or_alloc_constant_value_id (vp->result);
2836 FOR_EACH_HTAB_ELEMENT (valid_info->references,
2837 vr, vn_reference_t, hi)
2841 if (TREE_CODE (vr->result) == SSA_NAME)
2842 vr->value_id = VN_INFO (vr->result)->value_id;
2843 else if (is_gimple_min_invariant (vr->result))
2844 vr->value_id = get_or_alloc_constant_value_id (vr->result);
2849 /* Do SCCVN. Returns true if it finished, false if we bailed out
2850 due to resource constraints. */
/* Driver for SCCVN (see the comment above): value-number default
   definitions of parameters, DFS every used-but-unvisited SSA name,
   assign value ids, propagate them to a fixed point, and push them into
   the valid hash tables.  MAY_INSERT_ARG controls the global may_insert
   flag.  NOTE(review): lines are elided in this extract — the function
   header/braces, declarations (i, param, info), the DFS call and
   bail-out path, the "changed" fixed-point loop header, and the
   returns are not visible.  */
2853 run_scc_vn (bool may_insert_arg)
2857 bool changed = true;
2859 may_insert = may_insert_arg;
2862 current_info = valid_info;
/* Default definitions of parameters are their own value number.  */
2864 for (param = DECL_ARGUMENTS (current_function_decl);
2866 param = TREE_CHAIN (param))
2868 if (gimple_default_def (cfun, param) != NULL)
2870 tree def = gimple_default_def (cfun, param);
2871 VN_INFO (def)->valnum = def;
/* Start a DFS from every SSA name with uses that the SCC walk has not
   reached yet (index 0 is skipped; names start at 1).  */
2875 for (i = 1; i < num_ssa_names; ++i)
2877 tree name = ssa_name (i);
2879 && VN_INFO (name)->visited == false
2880 && !has_zero_uses (name))
2889 /* Initialize the value ids. */
2891 for (i = 1; i < num_ssa_names; ++i)
2893 tree name = ssa_name (i);
2897 info = VN_INFO (name);
/* A name that is its own value (or still VN_TOP) heads a fresh value
   class; invariants share per-constant ids.  */
2898 if (info->valnum == name
2899 || info->valnum == VN_TOP)
2900 info->value_id = get_next_value_id ();
2901 else if (is_gimple_min_invariant (info->valnum))
2902 info->value_id = get_or_alloc_constant_value_id (info->valnum);
2905 /* Propagate until they stop changing. */
2909 for (i = 1; i < num_ssa_names; ++i)
2911 tree name = ssa_name (i);
2915 info = VN_INFO (name);
/* Copy the value id from the representative of the value class.  */
2916 if (TREE_CODE (info->valnum) == SSA_NAME
2917 && info->valnum != name
2918 && info->value_id != VN_INFO (info->valnum)->value_id)
2921 info->value_id = VN_INFO (info->valnum)->value_id;
2926 set_hashtable_value_ids ();
2928 if (dump_file && (dump_flags & TDF_DETAILS))
2930 fprintf (dump_file, "Value numbers:\n");
2931 for (i = 0; i < num_ssa_names; i++)
2933 tree name = ssa_name (i);
2935 && VN_INFO (name)->visited
2936 && SSA_VAL (name) != name)
2938 print_generic_expr (dump_file, name, 0);
2939 fprintf (dump_file, " = ");
2940 print_generic_expr (dump_file, SSA_VAL (name), 0);
2941 fprintf (dump_file, "\n");
2950 /* Return the maximum value id we have ever seen. */
/* Return the maximum value id ever handed out (next_value_id is the
   next unused id, so it bounds all ids assigned so far).  NOTE(review):
   return type and braces are elided in this extract.  */
2953 get_max_value_id (void)
2955 return next_value_id;
2958 /* Return the next unique value id. */
/* Hand out the next unique value id (post-increments the global
   counter, so ids are never reused).  NOTE(review): return type and
   braces are elided in this extract.  */
2961 get_next_value_id (void)
2963 return next_value_id++;
2967 /* Compare two expressions E1 and E2 and return true if they are equal. */
/* Return true if trees E1 and E2 are equal: pointer-identical, equal
   element-wise as TREE_LISTs, or structurally equal per operand_equal_p
   with OEP_PURE_SAME.  NOTE(review): lines are elided in this extract —
   the pointer-equality and null checks referenced by the comments, the
   list-loop condition and declarations, and the returns are not
   visible.  */
2970 expressions_equal_p (tree e1, tree e2)
2972 /* The obvious case. */
2976 /* If only one of them is null, they cannot be equal. */
2980 /* Recurse on elements of lists. */
2981 if (TREE_CODE (e1) == TREE_LIST && TREE_CODE (e2) == TREE_LIST)
2985 for (lop1 = e1, lop2 = e2;
2987 lop1 = TREE_CHAIN (lop1), lop2 = TREE_CHAIN (lop2))
2991 if (!expressions_equal_p (TREE_VALUE (lop1), TREE_VALUE (lop2)))
2997 /* Now perform the actual comparison. */
2998 if (TREE_CODE (e1) == TREE_CODE (e2)
2999 && operand_equal_p (e1, e2, OEP_PURE_SAME))
3006 /* Return true if the nary operation NARY may trap. This is a copy
3007 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
3010 vn_nary_may_trap (vn_nary_op_t nary)
3014 bool honor_nans = false;
3015 bool honor_snans = false;
3016 bool fp_operation = false;
3017 bool honor_trapv = false;
3021 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
3022 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
3023 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
3026 fp_operation = FLOAT_TYPE_P (type);
3029 honor_nans = flag_trapping_math && !flag_finite_math_only;
3030 honor_snans = flag_signaling_nans != 0;
3032 else if (INTEGRAL_TYPE_P (type)
3033 && TYPE_OVERFLOW_TRAPS (type))
3037 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
3039 honor_nans, honor_snans, rhs2,
3045 for (i = 0; i < nary->length; ++i)
3046 if (tree_could_trap_p (nary->op[i]))